
Workflows #1

Merged · 9 commits · Jun 2, 2023

86 changes: 86 additions & 0 deletions .github/workflows/build.yml
@@ -0,0 +1,86 @@
name: Build Jobs

on: [ push ]

# Build all of the projects and run them against the CASE graph

jobs:
  dotnet:
    runs-on: ubuntu-latest
    steps:

      # Get the code from the repository
      - name: Get Repo
        uses: actions/checkout@v3

      # Install the .NET SDK
      - name: Setup .NET
        uses: actions/[email protected]
        with:
          dotnet-version: 6.x

      # Build the project
      - name: Build
        run: dotnet build
        working-directory: ./dotnet/

      # Run the built CASE conversion example
      - name: Run CASE Conversion
        run: |
          dotnet run ../data/geo.json ../output/dotnet.geojson
          cat ../output/dotnet.geojson
        working-directory: ./dotnet/

  python:
    runs-on: ubuntu-latest
    steps:

      # Get the code from the repository
      - name: Get Repo
        uses: actions/checkout@v3

      # Install Python
      - name: Setup Python
        uses: actions/[email protected]
        with:
          python-version: 3.9

      # Install the rdflib dependency
      - name: Install Dependencies
        run: pip3 install rdflib

      # Run the Python CASE conversion example
      - name: Run CASE Conversion
        run: |
          ls
          python3 CASE2GeoJSON.py ../data/geo.json ../output/python.geojson
          cat ../output/python.geojson
        working-directory: ./python/

  java:
    runs-on: ubuntu-latest
    steps:

      # Get the code from the repository
      - name: Get Repo
        uses: actions/checkout@v3

      # Install Java
      - name: Setup Java
        uses: actions/[email protected]
        with:
          distribution: 'temurin'
          java-version: '18'
          cache: 'maven'

      # Build the project
      - name: Build Project
        run: mvn compile assembly:single
        working-directory: ./java/case2geo/

      # Run the Java CASE conversion example
      - name: Run CASE Conversion
        run: |
          java -jar ./target/case2geo-0.1.0.jar ../../data/geo.json ../../output/java.geojson
          cat ../../output/java.geojson
        working-directory: ./java/case2geo/
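
All three jobs convert the same ../data/geo.json input into a language-specific GeoJSON file (dotnet.geojson, python.geojson, java.geojson). As a rough local cross-check, the three outputs can be compared for consistency; the sketch below is not part of this pull request, assumes the converters have already written their files into ./output/, and assumes each output is a GeoJSON FeatureCollection:

import json
from pathlib import Path

# Paths written by the dotnet, python, and java jobs in build.yml
OUTPUTS = [
    Path("output/dotnet.geojson"),
    Path("output/python.geojson"),
    Path("output/java.geojson"),
]

def load_features(path: Path) -> list:
    """Load a GeoJSON FeatureCollection and return its features list."""
    with path.open() as handle:
        collection = json.load(handle)
    return collection.get("features", [])

feature_sets = {path.name: load_features(path) for path in OUTPUTS}

# A loose consistency check: every converter should emit the same number of features.
counts = {name: len(features) for name, features in feature_sets.items()}
print(counts)
assert len(set(counts.values())) == 1, f"Converters disagree on feature count: {counts}"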
22 changes: 22 additions & 0 deletions .github/workflows/case_validate.yml
@@ -0,0 +1,22 @@
name: CASE Validate

on: [ push ]

# Ensure all of the files in /data/ are valid CASE graphs.

jobs:
  validate:
    runs-on: ubuntu-latest
    steps:

      # Get the code from the repository to be packaged
      - name: Get Repo
        uses: actions/checkout@v3

      # Validate the CASE graphs in the /data/ directory
      - name: CASE Validate
        uses: kchason/[email protected]
        with:
          case-path: "./data/"
          case-version: "case-1.2.0"
          extension-filter: "json"
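
The workflow delegates validation to the kchason CASE validation action. For a local spot-check, roughly the same idea can be approximated with pyshacl, the SHACL engine commonly used for CASE/UCO validation. This is only a sketch, not the action's actual implementation: the ontology path below is a placeholder for a locally downloaded copy of the CASE 1.2.0 ontology, and reusing it as both the shapes graph and the ontology graph is an assumption.

from pyshacl import validate
from rdflib import Graph

# Placeholder path: a locally downloaded copy of the CASE/UCO 1.2.0 ontology (Turtle).
ONTOLOGY_PATH = "case-1.2.0.ttl"

data_graph = Graph().parse("data/geo.json", format="json-ld")
shape_graph = Graph().parse(ONTOLOGY_PATH, format="turtle")

conforms, _, results_text = validate(
    data_graph,
    shacl_graph=shape_graph,
    ont_graph=shape_graph,  # assumes shapes and class definitions live in the same graph
    inference="rdfs",
)
print(results_text)
raise SystemExit(0 if conforms else 1)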
20 changes: 20 additions & 0 deletions .github/workflows/lint.yml
@@ -0,0 +1,20 @@
name: Lint

on: [ push ]

# Run the pre-commit linting checks against the repository

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:

      # Get the code from the repository
      - name: Get Repo
        uses: actions/checkout@v3

      # Install pre-commit and run it
      - name: Pre-commit Checks
        run: |
          pip -q install pre-commit
          pre-commit run --all-files
1 change: 0 additions & 1 deletion .pre-commit-config.yaml
@@ -10,7 +10,6 @@ repos:
- id: end-of-file-fixer
- id: check-yaml
- id: check-added-large-files
- id: check-case-conflict
- id: check-merge-conflict
- id: check-json
- id: check-xml
4 changes: 2 additions & 2 deletions java/case2geo/pom.xml
@@ -15,8 +15,8 @@

<properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>1.18</maven.compiler.source>
    <maven.compiler.target>1.18</maven.compiler.target>
    <maven.compiler.source>18</maven.compiler.source>
    <maven.compiler.target>18</maven.compiler.target>
    <exec.mainClass>org.caseontology.examples.CASE2Geo</exec.mainClass>
</properties>

86 changes: 86 additions & 0 deletions python/CASE2GeoJSON.py
@@ -0,0 +1,86 @@
import json
import sys
from os.path import exists, isdir, isfile

from geotypes import GeoRecord
from geoutilities import records_to_geojson, remove_nulls
from rdflib import Graph

# Parse the arguments from the CLI to get the input and output filenames
if len(sys.argv) != 3:
    print("Usage: python case2geojson.py <input-file> <output-file>")
    sys.exit(1)

input_filename: str = sys.argv[1]
output_filename: str = sys.argv[2]

# Ensure the input file exists
if not exists(input_filename) and not isfile(input_filename):
    print(f"File not found: {input_filename}")
    sys.exit(1)

# Ensure the output directory exists
output_directory: str = output_filename[: output_filename.rfind("/")]
if not exists(output_directory) and not isdir(output_directory):
    print(f"Directory not found: {output_directory}")
    sys.exit(1)

# Build the rdflib graph from the input file
graph: Graph = Graph()
graph.parse(input_filename)

# Write the SPARQL query to get the data from the graph
query: str = """
SELECT ?lLatitude ?lLongitude ?lAddressType ?lCountry ?lLocality ?lPostalCode ?lRegion ?lStreet
WHERE
{
    ?nLocation a uco-location:Location .
    OPTIONAL
    {
        ?nLocation uco-core:hasFacet ?nLatLongFacet .
        ?nLatLongFacet a uco-location:LatLongCoordinatesFacet .
        OPTIONAL { ?nLatLongFacet uco-location:latitude ?lLatitude . }
        OPTIONAL { ?nLatLongFacet uco-location:longitude ?lLongitude . }
    }

    OPTIONAL {
        ?nLocation uco-core:hasFacet ?nSimpleAddressFacet .
        ?nSimpleAddressFacet a uco-location:SimpleAddressFacet .
        OPTIONAL { ?nSimpleAddressFacet uco-location:addressType ?lAddressType . }
        OPTIONAL { ?nSimpleAddressFacet uco-location:country ?lCountry . }
        OPTIONAL { ?nSimpleAddressFacet uco-location:locality ?lLocality . }
        OPTIONAL { ?nSimpleAddressFacet uco-location:postalCode ?lPostalCode . }
        OPTIONAL { ?nSimpleAddressFacet uco-location:region ?lRegion . }
        OPTIONAL { ?nSimpleAddressFacet uco-location:street ?lStreet . }
    }
}
"""

results = graph.query(query)
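# The query above uses the uco-core: and uco-location: prefixes without PREFIX
# declarations, so it relies on those prefixes being bound on the graph by the
# parsed JSON-LD context. If they were not bound, rdflib's initNs argument could
# supply them explicitly; the namespace IRIs shown here are an assumption based
# on the published UCO ontology rather than anything shown in this repository:
#
#   from rdflib import Namespace
#   results = graph.query(
#       query,
#       initNs={
#           "uco-core": Namespace("https://ontology.unifiedcyberontology.org/uco/core/"),
#           "uco-location": Namespace("https://ontology.unifiedcyberontology.org/uco/location/"),
#       },
#   )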

# Define the list of GeoRecords
records: list[GeoRecord] = []

# Loop through the results and add them to the list of GeoRecords if the latitude and longitude are present
for row in results:
    geo_record: GeoRecord = GeoRecord()
    geo_record.Latitude = row.lLatitude
    geo_record.Longitude = row.lLongitude
    geo_record.AddressType = row.lAddressType
    geo_record.Country = row.lCountry
    geo_record.Locality = row.lLocality
    geo_record.PostalCode = row.lPostalCode
    geo_record.Region = row.lRegion
    geo_record.Street = row.lStreet
    records.append(geo_record)

# Convert the data to a GeoJSON structured object
geoJSON = records_to_geojson(records)

# Remove null values from the GeoJSON object
geoDict: dict = geoJSON.reprJSON()
geoDict = remove_nulls(geoDict)

# Write the GeoJSON object to the output file
with open(output_filename, "w") as output_file:
    output_file.write(json.dumps(geoDict, indent=4))
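
CASE2GeoJSON.py imports GeoRecord from geotypes and records_to_geojson / remove_nulls from geoutilities, none of which appear in this diff. A minimal sketch of what those helpers might look like, inferred only from how the script calls them; the FeatureCollection class name, field conversions, and property names below are assumptions, not the repository's actual code:

from typing import Any, List, Optional


class GeoRecord:
    """One location row pulled out of the CASE graph (all fields optional)."""

    def __init__(self) -> None:
        self.Latitude: Optional[Any] = None
        self.Longitude: Optional[Any] = None
        self.AddressType: Optional[Any] = None
        self.Country: Optional[Any] = None
        self.Locality: Optional[Any] = None
        self.PostalCode: Optional[Any] = None
        self.Region: Optional[Any] = None
        self.Street: Optional[Any] = None


class FeatureCollection:
    """Thin wrapper so callers can ask for a JSON-ready dict via reprJSON()."""

    def __init__(self, features: List[dict]) -> None:
        self.features = features

    def reprJSON(self) -> dict:
        return {"type": "FeatureCollection", "features": self.features}


def records_to_geojson(records: List[GeoRecord]) -> FeatureCollection:
    """Convert GeoRecords into GeoJSON Point features (longitude listed first)."""
    features: List[dict] = []
    for record in records:
        features.append(
            {
                "type": "Feature",
                "geometry": {
                    "type": "Point",
                    "coordinates": [
                        float(record.Longitude) if record.Longitude is not None else None,
                        float(record.Latitude) if record.Latitude is not None else None,
                    ],
                },
                "properties": {
                    "addressType": str(record.AddressType) if record.AddressType else None,
                    "country": str(record.Country) if record.Country else None,
                    "locality": str(record.Locality) if record.Locality else None,
                    "postalCode": str(record.PostalCode) if record.PostalCode else None,
                    "region": str(record.Region) if record.Region else None,
                    "street": str(record.Street) if record.Street else None,
                },
            }
        )
    return FeatureCollection(features)


def remove_nulls(value: Any) -> Any:
    """Recursively drop None values from nested dicts and lists."""
    if isinstance(value, dict):
        return {k: remove_nulls(v) for k, v in value.items() if v is not None}
    if isinstance(value, list):
        return [remove_nulls(v) for v in value if v is not None]
    return value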