Custom Locations

This is an enterprise service. Send an inquiry to Unacast if you are interested.

Obtain foot traffic for custom locations on demand. This API allows you to submit requests for foot traffic reports on your custom polygons.

These APIs are currently available over gRPC only. The examples below cover the currently supported language clients; other language clients can be provided on request.

Constraints

  • Only US locations are supported
  • Up to 500 locations can be provided per report (a client-side validation sketch follows this list)
  • Rate limits apply based on your contract terms. For example, you might be allowed to create 10 reports within a 28-day moving window
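
If you generate the GeoJSON programmatically, it can help to validate the payload against these constraints before creating a report. The snippet below is a minimal client-side sketch, not part of unacatlib: validate_poi_geo_json is a hypothetical helper that only checks that the payload is a FeatureCollection of at most 500 Polygon features; it does not enforce rate limits.

import json

MAX_LOCATIONS_PER_REPORT = 500  # constraint listed above


def validate_poi_geo_json(poi_geo_json: str) -> None:
    """Hypothetical client-side check; raises ValueError on an obviously invalid payload."""
    doc = json.loads(poi_geo_json)

    if doc.get("type") != "FeatureCollection":
        raise ValueError("Top-level GeoJSON object must be a FeatureCollection")

    features = doc.get("features", [])
    if not 0 < len(features) <= MAX_LOCATIONS_PER_REPORT:
        raise ValueError(f"Expected 1-{MAX_LOCATIONS_PER_REPORT} features, got {len(features)}")

    for feature in features:
        if feature.get("geometry", {}).get("type") != "Polygon":
            raise ValueError("Only Polygon geometries are accepted")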

Metric Models

metric_id                               | more info
foot_traffic_week_202401                | Foot Traffic w/ Visit Length
foot_traffic_month_202401               | Foot Traffic w/ Visit Length
popular_times_202408                    | Popular Times (limited to a max of 70k sqm per polygon)
trade_areas_quarterly_202401            | Trade Area
trade_areas_zip_quarterly               | Trade Area
demographics_quarterly_202401           | Visitor Demographics
visitor_journey_202503                  | Visitor Journey
spatialai_personalive_quarterly_202401  | Premium option: Visitor Psychographics with SpatialAI PersonAlive segments

Metric Status

Status       | Description
UNSPECIFIED  | Unknown reason. Please contact Unacast for more information.
RUNNING      | The metric is being processed.
PRODUCED     | The metric has been produced and will be available soon; the system is finalizing processing before it is ready to use.
FINISHED     | The metric was produced successfully.
REJECTED     | The metric was rejected; the reason depends on your input. Check the details in the metric_status_reason attribute.
FAILED       | The metric generation process failed. Check the details in the metric_status_reason attribute.
DELAYED      | The metric is delayed. Check the details in the metric_status_reason attribute.
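
Because reports are generated asynchronously, a metric usually passes through RUNNING and PRODUCED before reaching FINISHED. The loop below is a minimal polling sketch, assuming the BYOApiClient.read_us_report call and MetricStatus enum used in the Python examples further down; wait_for_metric and the five-minute interval are illustrative choices, not part of unacatlib.

import time

from unacatlib.byo_external import BYOApiClient, MetricStatus

# Terminal states according to the table above; everything else means "check back later".
TERMINAL_STATUSES = {MetricStatus.FINISHED, MetricStatus.REJECTED, MetricStatus.FAILED}


def wait_for_metric(client: BYOApiClient, report_id: str, metric_id: str, poll_seconds: int = 300):
    """Hypothetical helper: poll read_us_report until the metric reaches a terminal status."""
    while True:
        metric_values = client.read_us_report(report_id=report_id, metric_id=metric_id)
        status = MetricStatus(metric_values.metric_status)
        if status in TERMINAL_STATUSES:
            return metric_values
        print(f"{metric_id} is {status.name}, checking again in {poll_seconds}s")
        time.sleep(poll_seconds)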

Examples

Python

Prerequisites

  • Install the Python client library: pip install unacatlib

1. Create your report

import datetime

from unacatlib.byo_external import BYOApiClient, ReportJob

# Sample GeoJSON; currently only a FeatureCollection of Polygon features is accepted.
poi_geo_json = '''{
  "type": "FeatureCollection",
  "features": [
    {
      "type": "Feature",
      "properties": {
        "name": "my-given-location-name"
      },
      "geometry": {
        "type": "Polygon",
        "coordinates": [
          [
            [-122.276737859, 37.8439032810001],
            [-122.276899008, 37.8438818770001],
            [-122.276870739, 37.84375312],
            [-122.276741817, 37.8437702410001],
            [-122.276711462, 37.8437830560001],
            [-122.276737859, 37.8439032810001]
          ]
        ]
      }
    }
  ]
}'''


def main():
    with BYOApiClient(
        billing_account="<ID you'll be provided>",
        token="<Token you'll be provided>",
    ) as client:
        metric_ids = ["<metric_id of your choice>"]
        response = client.create_us_report(
            pois=poi_geo_json,
            metric_ids=metric_ids,
            start_date=datetime.date(year=2024, month=3, day=1),
            end_date=datetime.date(year=2024, month=3, day=31),
        )
        print(f"Use report_id: {response.id} and metric_ids: {response.metric_ids} to read your report later.")


if __name__ == "__main__":
    main()

2. Read your report

import pandas as pd

from unacatlib.byo_external import BYOApiClient, MetricStatus

report_id = "<report id you get from the previous step>"
metric_id = "<metric_id of your request>"


def metric_values_to_df(metric_values):
    data_pydict = metric_values.to_pydict()

    # Extract the values into a structured format
    records = []

    for entry in data_pydict['values']:
        # Extract base metadata
        base_record = {
            'metric_id': entry.get('metricId'),
            'feature_id': entry['mapFeatureV2'].get('featureId'),
            'display_name': entry['mapFeatureV2'].get('name'),
            'observation_start_date': f"{entry['observationPeriod']['start']['year']}-{entry['observationPeriod']['start']['month']:02d}-{entry['observationPeriod']['start']['day']:02d}",
            'observation_end_date': f"{entry['observationPeriod']['end']['year']}-{entry['observationPeriod']['end']['month']:02d}-{entry['observationPeriod']['end']['day']:02d}",
        }

        # Add dimensions to the record
        for dimension in entry.get('dimensions', []):
            base_record[f"dimension_{dimension['dimensionId']}"] = dimension.get('value')
            base_record[f"dimension_{dimension['dimensionId']}_name"] = dimension.get('displayName')

        # Extract values dynamically into a single row
        row = base_record.copy()
        for value in entry.get('values', []):
            name = value.get('name')

            if 'count' in value:
                row[name] = int(value['count'])
            elif 'number' in value:
                row[name] = float(value['number'])
            else:
                row[name] = None  # fallback if neither exists

        records.append(row)

    # Create a pandas DataFrame
    df = pd.DataFrame(records)

    # Convert date columns to datetime objects
    df['observation_start_date'] = pd.to_datetime(df['observation_start_date'])
    df['observation_end_date'] = pd.to_datetime(df['observation_end_date'])
    return df


def main():
    with BYOApiClient(
        billing_account="<ID you'll be provided>",
        token="<Token you'll be provided>",
    ) as client:
        metric_values = client.read_us_report(report_id=report_id, metric_id=metric_id)
        if metric_values.metric_status == MetricStatus.FINISHED:
            df = metric_values_to_df(metric_values)
            output_path = f"{report_id}_{metric_id}.csv"
            df.to_csv(output_path, index=False)
        else:
            print(f'metric status: {MetricStatus(metric_values.metric_status).name}')
            print(f'metric status reason: {metric_values.metric_status_reason}')


if __name__ == "__main__":
    main()
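
If you requested several metrics in a single report, the same read pattern can be looped over the metric_ids returned by create_us_report in step 1. Below is a minimal sketch reusing metric_values_to_df from the script above; the placeholder metric ids are assumptions to be replaced with your own.

# Sketch: export every finished metric of a report to its own CSV,
# reusing metric_values_to_df, BYOApiClient and MetricStatus from the script above.
report_id = "<report id you get from step 1>"
metric_ids = ["<metric_id A>", "<metric_id B>"]  # e.g. response.metric_ids from step 1

with BYOApiClient(
    billing_account="<ID you'll be provided>",
    token="<Token you'll be provided>",
) as client:
    for metric_id in metric_ids:
        metric_values = client.read_us_report(report_id=report_id, metric_id=metric_id)
        if metric_values.metric_status == MetricStatus.FINISHED:
            metric_values_to_df(metric_values).to_csv(f"{report_id}_{metric_id}.csv", index=False)
        else:
            print(f"{metric_id}: {MetricStatus(metric_values.metric_status).name} "
                  f"({metric_values.metric_status_reason})")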

Notebook example

Link to Colab notebook example