https://colab.research.google.com/assets/colab-badge.svg

EarthRanger IO#

Setup#

Ecoscope#

[ ]:
# !pip install ecoscope
[ ]:
import getpass
import json
import os
import sys
import uuid

import geopandas as gpd
import pandas as pd
from shapely.geometry import Point

import ecoscope

# One-time ecoscope initialization for this session (required before using its
# IO helpers below).
ecoscope.init()

Google Drive Setup#

[ ]:
# All notebook outputs are collected under one directory.  On Google Colab the
# user's Drive is mounted and the directory is placed inside MyDrive so results
# survive the runtime; elsewhere it is created relative to the working directory.
output_dir = "Ecoscope-Outputs"

in_colab = "google.colab" in sys.modules
if in_colab:
    from google.colab import drive

    drive.mount("/content/drive/", force_remount=True)
    output_dir = os.path.join("/content/drive/MyDrive/", output_dir)

os.makedirs(output_dir, exist_ok=True)

Connect to EarthRanger#

[ ]:
# Connection settings come from the environment, falling back to the public ER
# sandbox.  The password is prompted for interactively only when not supplied
# via ER_PASSWORD.
ER_SERVER = os.getenv("ER_SERVER", "https://sandbox.pamdas.org")
ER_USERNAME = os.getenv("ER_USERNAME", "admin")
ER_PASSWORD = os.getenv("ER_PASSWORD", "") or getpass.getpass("Please enter your ER password: ")

er_io = ecoscope.io.EarthRangerIO(
    server=ER_SERVER,
    username=ER_USERNAME,
    password=ER_PASSWORD,
    tcp_limit=5,  # presumably caps concurrent connections — see EarthRangerIO docs
    sub_page_size=4000,  # presumably records per request page — see EarthRangerIO docs
)

GET Event#

By event_ids#

Use this approach to download an individual Event if you know its id. Downloading from multiple ids at once is currently unsupported in ER.

[ ]:
# Fetch one specific event by its UUID.
event_id = "1ead840a-0545-480d-85e1-97e90a5cd03e"
er_io.get_events(event_ids=event_id)

By event_type#

Use this approach if you want to download events of a given event type (using the EventType ID from ER)

[ ]:
# Events of a single event type, restricted to May 2022 through the present.
date_filter = {
    "date_range": {
        "lower": pd.Timestamp("2022-05", tz="utc").isoformat(),
        "upper": pd.Timestamp.utcnow().isoformat(),
    }
}
er_io.get_events(
    event_type=["2adf7302-ecef-42af-9f7c-45992ce52dd0"],
    filter=json.dumps(date_filter),
)

Unpack event_details column of JSON data into individual columns#

[ ]:
# Download events of the given event type; nested JSON for each event arrives
# in the "event_details" column.
events_df = er_io.get_events(event_type=["2adf7302-ecef-42af-9f7c-45992ce52dd0"])
events_df.event_details
[ ]:
# NOTE(review): _normalize_column is a private EarthRangerIO helper; it appears
# to expand the JSON "event_details" column into one column per key, in place —
# confirm against the ecoscope source, and prefer a public API if one exists.
er_io._normalize_column(events_df, "event_details")
events_df

POST Event#

Define events to be uploaded:

[ ]:
# Two example carcass-report events: the first has a fixed UUID and blank
# detail fields; the second is fully populated and gets a fresh random UUID so
# it can be posted repeatedly without colliding with an existing event.
new_events = [
    {
        "event_details": {
            "carcassrep_ageofanimal": None,
            "carcassrep_ageofcarcass": None,
            "carcassrep_causeofdeath": None,
            "carcassrep_sex": None,
            "carcassrep_species": None,
            "carcassrep_trophystatus": None,
        },
        "event_type": "carcass_rep",
        "icon_id": "carcass_rep",
        "id": "e29f9078-ee0a-4f06-b685-92e9ff266e9b",  # fixed id: re-posting will conflict server-side
        "location": {"latitude": -27.12759, "longitude": -109.40804},
        "priority": 200,
        "state": "resolved",
        "time": pd.Timestamp("2022-03-15 15:44:00-0700"),
        "title": "Carcass",
    },
    {
        "event_details": {
            "carcassrep_ageofanimal": "subadult",
            "carcassrep_ageofcarcass": "less_than_a_day",
            "carcassrep_causeofdeath": "unnaturalshot",
            "carcassrep_sex": "male",
            "carcassrep_species": "elephant",
            "carcassrep_trophystatus": "removed",
        },
        "event_type": "carcass_rep",
        "icon_id": "carcass_rep",
        "id": str(uuid.uuid4()),  # random id: safe to re-post
        "location": {"latitude": -27.11567, "longitude": -109.33147},
        "priority": 300,
        "state": "new",
        "time": pd.Timestamp.utcnow() - pd.Timedelta(hours=1),
        "title": "Carcass",
    },
]

Read events as dataframe:

[ ]:
# The DataFrame constructor accepts the list of event dicts directly,
# producing one row per event.
new_events_df = pd.DataFrame(new_events)
new_events_df

Upload:

[ ]:
# Post only the first example event.  If the server rejects it (for example a
# DasClientException because the fixed id already exists), print the error
# message instead of raising.
try:
    er_io.post_event(events=new_events_df.iloc[[0]])
except ecoscope.contrib.dasclient.DasClientException as e:
    print(e)

PATCH Event#

[ ]:
# A one-row frame holding only the fields to overwrite on the target event.
patch_fields = {
    "priority": 200,
    "state": "active",
    "location": {
        "longitude": "-109.25454848840066",
        "latitude": "-27.09529454271872",
    },
}
updated_event = pd.DataFrame([patch_fields])

Patch

[ ]:
er_io.patch_event(event_id="d3fe3a66-b75c-44a8-8fd6-5bd01905dd00", events=updated_event)

GET Subjects#

by id#

[ ]:
er_io.get_subjects(id="216b033d-c4ad-444a-9f41-baa6c97cde7d")

By SubjectGroup group_name#

[ ]:
er_io.get_subjects(group_name="Rhinos")

GET Observations#

The output is an Ecoscope Relocations dataframe that has a core set of columns: groupby_col, fixtime, junk_status.

All other columns are prefixed with extra__. The groupby_col will be the subject_id and the index of the dataframe is the observation indices.

Classic ER Observation format can be returned by passing in parameter relocations=False.

Filter observations by setting filter to one of: None, 0, 1, 2 or 3 - None returns everything - 0 filters out everything but observations with exclusion flag 0 (Pass back clean data) - 1 filters out everything but observations with exclusion flag 1 (Pass back manually filtered data) - 2 filters out everything but observations with exclusion flag 2 (Pass back automatically filtered data) - 3 filters out everything but observations with exclusion flag 2 or 1 (Pass back both manual and automatic filtered data)

[ ]:
# Query window: the trailing year up to the present moment.
lookback = pd.Timedelta(days=365)
since = pd.Timestamp.utcnow() - lookback
until = pd.Timestamp.utcnow()

By SubjectGroup group_name:#

[ ]:
# One year of observations for every subject in the "Rhinos" group, with every
# optional detail column attached and only exclusion-flag-0 (clean) fixes.
query = dict(
    group_name="Rhinos",
    include_inactive=True,
    include_subject_details=True,
    include_source_details=True,
    include_subjectsource_details=True,
    since=since.isoformat(),
    until=until.isoformat(),
    filter=0,
    include_details=True,
)
relocs = er_io.get_subjectgroup_observations(**query)
relocs

By subject_id#

[ ]:
# Observations for two specific subjects over the same one-year window,
# again keeping only clean (exclusion flag 0) fixes.
subject_ids = [
    "216b033d-c4ad-444a-9f41-baa6c97cde7d",
    "aca64374-a102-4ef5-9b58-60fd0bf64a61",
]
relocs = er_io.get_subject_observations(
    subject_ids=subject_ids,
    include_subject_details=True,
    include_source_details=True,
    include_subjectsource_details=True,
    since=since,
    until=until,
    filter=0,
    include_details=True,
)
relocs

By source_id#

[ ]:
# Observations pulled by source (device) id rather than by subject.
source_ids = [
    "d8f92f6e-1121-4833-b11c-6fc1ca334ff0",
    "f46b6e92-a09d-41dd-bc42-8244870189fd",
]
relocs = er_io.get_source_observations(
    source_ids=source_ids,
    include_source_details=True,
    since=since,
    until=until,
    filter=0,
    include_details=True,
)
relocs

GET Patrols#

[ ]:
# Patrols of one patrol type, within a date range bounded above by the present.
now = pd.Timestamp.utcnow().isoformat()

patrol_filter = {
    "date_range": {"upper": now},
    "patrol_type": ["c6f88fd2-2b87-477a-9c23-3bc4b3eb845d"],
}
er_io.get_patrols(filter=json.dumps(patrol_filter))

GET Observations for a Patrol#

[ ]:
# Fetch every patrol, then the observations recorded during each of them,
# with all optional detail columns switched off.
patrol_df = er_io.get_patrols()

detail_flags = dict(
    include_source_details=False,
    include_subject_details=False,
    include_subjectsource_details=False,
)
relocs = er_io.get_observations_for_patrols(patrol_df, **detail_flags)

GET Users#

[ ]:
# List the ER user accounts visible to the authenticated login.
df = er_io.get_users()
df

POST Observation#

Upload observations for existing source

[ ]:
# Three observation fixes, all timestamped now: two for the first source
# (at the same point) and one for a second source.
observations = [
    {
        "fixtime": pd.Timestamp.utcnow().isoformat(),
        "geometry": Point(0, 0),
        "source_id": "d8f92f6e-1121-4833-b11c-6fc1ca334ff0",
    },
    {
        "fixtime": pd.Timestamp.utcnow().isoformat(),
        "geometry": Point(0, 0),
        "source_id": "d8f92f6e-1121-4833-b11c-6fc1ca334ff0",
    },
    {
        "fixtime": pd.Timestamp.utcnow().isoformat(),
        "geometry": Point(1, 1),
        "source_id": "f46b6e92-a09d-41dd-bc42-8244870189fd",
    },
]

# A list of row dicts is accepted directly by the GeoDataFrame constructor.
gdf = gpd.GeoDataFrame(observations)
gdf

Post:

[ ]:
er_io.post_observations(observations=gdf, source_id_col="source_id", recorded_at_col="fixtime")

POST SubjectSource#

[ ]:
# Link an existing subject to an existing source, with an assignment range
# running from now until 30 days from now.
# NOTE(review): "lower_bound_assignend_range" (sic) must match the parameter
# spelling of the installed ecoscope's post_subjectsource signature — confirm
# before "fixing" the spelling here.
er_io.post_subjectsource(
    subject_id="216b033d-c4ad-444a-9f41-baa6c97cde7d",
    source_id="d8f92f6e-1121-4833-b11c-6fc1ca334ff0",
    lower_bound_assignend_range=pd.Timestamp.utcnow().isoformat(),
    upper_bound_assigned_range=(pd.Timestamp.utcnow() + pd.Timedelta(days=30)).isoformat(),
    additional={},
)

Export to File#

Create test data#

Skip this cell if you would like to provide your own data

[ ]:
# Pull 30 days of "Rhinos" observations with every detail column included,
# keeping only clean (exclusion flag 0) fixes, to use as export test data.
since = pd.Timestamp.utcnow() - pd.Timedelta(days=30)
until = pd.Timestamp.utcnow()

export_query = dict(
    group_name="Rhinos",
    include_inactive=True,
    include_details=True,
    include_subject_details=True,
    include_source_details=True,
    include_subjectsource_details=True,
    since=since,
    until=until,
    filter=0,
)
relocs = er_io.get_subjectgroup_observations(**export_query)

GeoPackage (.gpkg)#

(GeoPackage does not support columns with type list so we drop them.)

[ ]:
# GeoPackage cannot store list-valued columns, so find and drop any column
# that contains a Python list before writing.
# NOTE: the original used DataFrame.applymap, which is deprecated since
# pandas 2.1 (renamed to DataFrame.map); the per-column apply below is
# equivalent and works on all pandas versions.
list_columns = [
    col
    for col in relocs.columns
    if relocs[col].apply(lambda value: isinstance(value, list)).any()
]
relocs.drop(columns=list_columns, errors="ignore", inplace=True)

# Write the remaining columns to a GeoPackage layer in the output directory.
relocs.to_file(os.path.join(output_dir, "easter_island.gpkg"), layer="easter_island")

CSV#

[ ]:
relocs.to_csv(os.path.join(output_dir, "observations.csv"))

Python Pickle (.pkl)#

[ ]:
relocs.to_pickle(os.path.join(output_dir, "observations.pkl"))

Apache Feather (.feather)#

[ ]:
relocs.to_feather(os.path.join(output_dir, "observations.feather"))