Commit 291eb76c authored by Christine Plumejeaud's avatar Christine Plumejeaud
Browse files

API is tested locally. You can get metadata, or travels, or pointcalls data.

Using postgres, portic_v3 with the user api_user, password = 'portic'. READ only user.
parent 9a92d145
......@@ -40,7 +40,21 @@ Backend is running and serve the data on http://localhost:5004 (change the PORT
### 1.1.2. Running with Apache
TODO: document the Apache deployment procedure; the commands below show the current manual update steps.
```bash
plumegeo@cchum-kvm-plume:~/navigo/Viz/porticapi$ git fetch origin master
plumegeo@cchum-kvm-plume:~/navigo/Viz/porticapi$ git reset --hard origin/master
plumegeo@cchum-kvm-plume:~/navigo/Viz/porticapi$ cd ..
plumegeo@cchum-kvm-plume:~/navigo/Viz$ cp porticapi/porticapi/apidata.py porticapi/porticapi/__init__.py
sudo chown :www-data /home/plumegeo/navigo/Viz/porticapi/ -R
sudo chmod 755 /home/plumegeo/navigo/Viz/porticapi/ -R
for fic in $(find /home/plumegeo/navigo/Viz/porticapi/porticapi -type f -name "*.py"); do sudo dos2unix $fic; done
sudo service apache2 reload
```
### 1.1.3. Debug things
......@@ -70,19 +84,10 @@ Si le parametre n'est pas précisé, le serveur opte pour la valeur de paramètr
URL : **http://data.portic.fr/api/**
```py
def formatCSV2_deprecated(data):
dest = io.StringIO()
dest.write('[toto, 1, tutu]')
output = make_response(dest.getvalue())
#output = excel.make_response_from_array(data, 'csv')
output.headers["Content-Disposition"] = "attachment; filename=export.csv"
output.headers["Content-type"] = "text/csv"
return output
```
### 1.2.2. /fieldnames?
--- TOUT en json
--- TOUT en json ou csv
- API = pointcalls | travels | **any**
......@@ -114,6 +119,9 @@ Exemple
#### 1.2.4.2. /travels?link_to_port="UHGS_id du port"&both-to=true
TODO not done yet
Donnerait les trajets passant par le port en question, et toutes les escales.
sans doublons par défaut
avec doublons si both-to=true
......@@ -123,6 +131,8 @@ avec doublons si both-to=true
#### 1.2.4.3. /travels?link_to_port="UHGS_id du port"&degree=0&both-to=true
TODO not done yet
Donnerait les trajets entrants ou partants du port en question,
sans doublons, seulement les escales directement précédentes ou suivantes
sans doublons par défaut
......
......@@ -19,11 +19,13 @@ import csv
import json
import io
import os
import psycopg2 as pg
#import flask_ext
#import flask_excel as excel
#import pyexcel as pe
APP_ROOT = os.path.dirname(os.path.abspath(__file__)) # refers to application_top
APP_STATIC = os.path.join(APP_ROOT, 'static')
APP_DATA = os.path.join(APP_STATIC, 'data')
......@@ -34,34 +36,29 @@ CORS(app)
#app.config.from_object('config')
#port = app.config['PORT']
port = '80'
postgresport = '8004'
def isInside(lat, lon, radius, x, y):
    """
    Return True when point (x, y) lies within `radius` of (lat, lon).

    Plain squared-distance test in degree space (no great-circle
    correction); radius is expressed in degrees here.
    Will be computed by postgres at the end of the afternoon.
    """
    d_lat = x - lat
    d_lon = y - lon
    # Compare squared distances to avoid a square root.
    return d_lat * d_lat + d_lon * d_lon <= radius * radius
def calcul_isInside(lat, lon, radius):
def retrieveDataFromPostgres(query) :
"""
Will be computed by postgres at the end of the afternoon
Internal method to select data using SQL query
return a dataframe
"""
import pandas.io.sql as psql
import pandas as pd
df = pd.read_csv('static/data/travels_API_11mai2020.csv', sep = ';')
#radius in km is converted in radius in degree
calculated = df.apply(lambda row:isInside(lat,lon,radius/111.19492664455873,row.departure_latitude,row.departure_longitude), axis=1)
return df.loc[calculated]
#connection = pg.connect("host='134.158.33.179' port='5433' dbname='portic_v3' user='api_user' password='portic'")
connection = pg.connect("""host='localhost' port='%s' dbname='portic_v3' user='api_user' password='portic'"""% postgresport)
df = pd.read_sql_query(query,con=connection)
connection.close()
return df
#print(df)
def formatCSV(mydataframe):
"""
Internal method to output dataframe in a CSV file
"""
#print(mydataframe)
#sheet = pe.Sheet(data)
#sheet.save_to_memory("csv", dest)
#Options de compression possibles to_csv
#https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_csv.html
......@@ -75,16 +72,24 @@ def formatCSV(mydataframe):
output.headers["Content-type"] = "text/csv"
return output
def formatJSON(data):
json_str = json.dumps(data.to_json(orient='records'))
def formatJSON(dataframe):
"""
Internal method to output dataframe as JSON
"""
json_str = json.dumps(dataframe.to_json(orient='records'))
return json.loads(json_str)
def formatOutput(dfcsv):
def formatOutput(dfcsv, api='travels'):
"""
Apply various formatting on output by processing request parameters
Internal method
Apply various formatting on dataframe for output by processing request parameters
- format : csv | **json**
- zipped : true | **false**
- shortenfields : true | **false**
NB :
- shortenfields : the shortname is based on the ordinal position of the attribute in the table
thus it can change with living database
- zipped is not yet implemented
"""
import pandas as pd
......@@ -94,12 +99,13 @@ def formatOutput(dfcsv):
#print('shortenfields ? '+shortenfields)
if (shortenfields != None and shortenfields=='true') :
#API;colname;short_colname
filename = os.path.join(APP_DATA, 'shorten_names.csv')
mapnames = pd.read_csv(filename, sep = ';')
#filename = os.path.join(APP_DATA, 'shorten_names.csv')
#mapnames = pd.read_csv(filename, sep = ';')
mapnames = readFieldnames(api)
#Filter according API
mapnames = mapnames[mapnames['API']=='pointcalls']
#mapnames = mapnames[mapnames['API']=='pointcalls']
#print(mapnames['colname'])
mapper=mapnames.loc[:, ['colname', 'short_colname'] ].set_index('colname')['short_colname'].to_dict()
mapper=mapnames.loc[:, ['name', 'shortname'] ].set_index('name')['shortname'].to_dict()
#print(mapper)
dfcsv = dfcsv.rename(columns=mapper)
......@@ -110,24 +116,64 @@ def formatOutput(dfcsv):
else:
return formatJSON(dfcsv)
@app.route('/api/fieldnames/')
def getFieldnames():
def readFieldnames(api) :
"""
récupère des métadonnées sur l'API, avec la liste des attributs, avec leur nom court et long, leur type et leur signification.
get metadata about API with short and long name, type and definition
Internal method
We read the information schema to be sure to be conform to real living database
Name of tables differ from API names.
We generate the shortname using the order of the attribute in the table
(3 characters, beginning either with t if travels, either with p if pointcalls)
"""
import pandas as pd
table_name = "pointcall','built_travels"
if api is not None :
if api == 'travels' :
table_name = 'built_travels'
if api == 'pointcalls' :
table_name = 'pointcall'
#API;name;shortname;type;description
query = """SELECT case when c.table_name= 'built_travels' then 'travels' else 'pointcalls' end as API,
c.column_name as name,
case when c.table_name= 'built_travels' then 't' else 'p' end||navigo.pystrip(to_char(c.ordinal_position::int, '09')) as shortname,
c.data_type as type, pgd.description as description
FROM information_schema.columns c
left outer join pg_catalog.pg_description pgd on (pgd.objsubid=c.ordinal_position )
left outer join pg_catalog.pg_statio_all_tables st on (pgd.objoid=st.relid and c.table_schema=st.schemaname and c.table_name=st.relname)
where c.table_name in ('%s') and c.table_schema = 'navigoviz' and pgd.objoid = st.relid;"""% (table_name)
#print(query)
metadata = retrieveDataFromPostgres(query)
return metadata
"""
filename = os.path.join(APP_DATA, 'api_portic.csv')
dfcsv = pd.read_csv(filename, sep = ';')
# Filter to keep desired API
api = request.args.get("api")
#print(getduplicates)
if api is not None :
#Filter to retain this API
dfcsv = dfcsv[dfcsv['API']==api]
return dfcsv
"""
@app.route('/api/fieldnames/')
def getFieldnames():
"""
récupère des métadonnées sur l'API, avec la liste des attributs, avec leur nom court et long, leur type et leur signification.
get metadata about API with short and long name, type and definition
http://127.0.0.1:80/api/fieldnames/?format=json
http://127.0.0.1/api/fieldnames/?format=json&shortenfields=true
http://127.0.0.1/api/fieldnames/?format=json&shortenfields=true&api=pointcalls
http://127.0.0.1/api/fieldnames/?format=csv&shortenfields=true&api=pointcalls
"""
# Filter to keep desired API
api = request.args.get("api")
df = readFieldnames(api)
return formatOutput(dfcsv)
return formatOutput(df, api)
@app.route('/api/pointcalls/')
def getPointcalls():
......@@ -136,22 +182,29 @@ def getPointcalls():
Will be extracted from postgres, schema navigoviz, table pointcall (see navigocorpus/ETL)
- params : **all** | tableau des noms longs des attributs de l'API à renvoyer
http://127.0.0.1:5004/api/pointcalls/?params=pointcall,pointcall_uhgs_id&shortenfields=true
http://127.0.0.1:5004/api/pointcalls/?format=csv
http://127.0.0.1:80/api/pointcalls/?params=pointcall,pointcall_uhgs_id&shortenfields=true
http://127.0.0.1:80/api/pointcalls/?format=csv
http://127.0.0.1:80/api/pointcalls/?params=pointcall,pointcall_uhgs_id&shortenfields=false
http://127.0.0.1:80/api/pointcalls/?format=json&params=id,pointcall,ship_name,destination,destination_uhgs_id&shortenfields=true
"""
import pandas as pd
filename = os.path.join(APP_DATA, 'pointcalls_API_11mai2020.csv')
dfcsv = pd.read_csv(filename, sep = ';')
#filename = os.path.join(APP_DATA, 'pointcalls_API_11mai2020.csv')
#dfcsv = pd.read_csv(filename, sep = ';')
#https://stackoverflow.com/questions/24251219/pandas-read-csv-low-memory-and-dtype-options
#pd.read_csv('static/data/pointcalls_API_11mai2020.csv', dtype={"all_cargos": object, "pkid": int})
#print(dfcsv.columns)
query = 'select * from navigoviz.pointcall'
## Filter the result according requested params
params = request.args.get("params")
#params=pointcall,pointcall_uhgs_id
if (params is not None and len(params)>0) :
#print('selecting some columns')
fields = readFieldnames('pointcalls')
keepparams = str(params).split(',')
#keepparams = ['pkid', 'pointcall', 'pointcall_uhgs_id']
#https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#deprecate-loc-reindex-listlike
......@@ -159,12 +212,17 @@ def getPointcalls():
#labels = dfcsv.index.intersection(keepparams)
labels = []
for k in keepparams:
if k in dfcsv.columns :
#print(k)
if k in fields['name'].tolist() :
labels.append(k)
#print(labels)
dfcsv = dfcsv.loc[:, labels]
attributes = ",".join(labels)
query = 'select '+attributes+' from navigoviz.pointcall'
#dfcsv = dfcsv.loc[:, labels]
dfcsv = retrieveDataFromPostgres(query)
return formatOutput(dfcsv)
return formatOutput(dfcsv, 'pointcalls')
......@@ -175,34 +233,48 @@ def getTravels():
Will be extracted from postgres, schema navigoviz, table built_travels (see navigocorpus/ETL),
but with a filter by default : only source_entry = from and both-from, to avoid duplicates
- params : **all** | tableau des noms longs des attributs de l'API à renvoyer
params=pointcall,pointcall_uhgs_id for instance
- both_to : true | **false**
http://127.0.0.1:5004/api/travels/?format=csv&both_to=false
http://127.0.0.1:80/api/travels/?format=csv&both_to=true&shortenfields=true
http://127.0.0.1:80/api/travels/?format=json&params=id,departure,destination,destination_uhgs_id
http://127.0.0.1:80/api/travels/?format=json&params=id,departure,destination,destination_uhgs_id&shortenfields=true
"""
import pandas as pd
filename = os.path.join(APP_DATA, 'travels_API_11mai2020.csv')
dfcsv = pd.read_csv(filename, sep = ';')
#filename = os.path.join(APP_DATA, 'travels_API_11mai2020.csv')
#dfcsv = pd.read_csv(filename, sep = ';')
query = 'select * from navigoviz.built_travels'
## Filter the result according requested params
params = request.args.get("params")
#print(params)
if (params != None) :
#params=pointcall,pointcall_uhgs_id
keepparams = params.split(',')
#keepparams = ['pkid', 'pointcall', 'pointcall_uhgs_id']
dfcsv = dfcsv.loc[:, keepparams]
if (params is not None and len(params)>0) :
#print('selecting some columns')
fields = readFieldnames('travels')
keepparams = str(params).split(',')
#https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#deprecate-loc-reindex-listlike
labels = []
for k in keepparams:
if k in fields['name'].tolist() :
labels.append(k)
attributes = ",".join(labels)
query = 'select '+attributes+' from navigoviz.built_travels'
# Filter to remove duplicates (by default) - if both_to is given, then do not filter and travels will contain duplicates
getduplicates = request.args.get("both_to")
#print(getduplicates)
if getduplicates is None or getduplicates != 'true' :
#Filter travels
#print('filtering duplicates out of travels')
dfcsv = dfcsv[dfcsv['source_entry']!='both-to']
return formatOutput(dfcsv)
#dfcsv = dfcsv[dfcsv['source_entry']!='both-to']
query = query + " where source_entry <> 'both-to'"
dfcsv = retrieveDataFromPostgres(query)
return formatOutput(dfcsv, 'travels')
......@@ -213,54 +285,122 @@ def getDeparturesDetails():
Return the travels, at the departure of the points located in a 100 km radius neighbourhood from the lat/lon given in parameter
Will be extracted from postgres, schema navigoviz, table built_travels (see navigocorpus/ETL),
but with a filter by default : only source_entry = from and both-from, to avoid duplicates
"""
lat = float(request.args.get("lat"))
lon = float(request.args.get("lon"))
radius = int(request.args.get("radius"))
df1 = calcul_isInside(lat, lon, radius)
dfcsv = df1[['departure','departure_uhgs_id','departure_latitude','departure_longitude']].drop_duplicates()
return formatOutput(dfcsv)
http://localhost:80/api/details/departures?lat=46&lon=-1&radius=100
http://localhost/api/details/departures/?lat=45.2333&lon=-1.5&radius=100
http://localhost/api/details/departures/?lat=45.2333&lon=toto&radius=100
"""
lat = None
lon = None
radius = None
try:
lat = float(request.args.get("lat"))
lon = float(request.args.get("lon"))
radius = int(request.args.get("radius"))
except ValueError:
#We go on, silenciously, by selecting all data
pass
if lat != None and lon != None and radius!= None :
query = """select distinct departure, departure_uhgs_id, departure_latitude, departure_longitude from navigoviz.built_travels
where source_entry<> 'both-to' and
st_distance(departure_point,
st_setsrid(st_transform(st_setsrid(st_makepoint(%f, %f), 4326), 3857), 3857)) < %f""" % (lon,lat,radius*1000)
else :
query = """select distinct departure, departure_uhgs_id, departure_latitude, departure_longitude
from navigoviz.built_travels
where source_entry<> 'both-to'"""
#df1 = calcul_isInside(lat, lon, radius)
#dfcsv = df1[['departure','departure_uhgs_id','departure_latitude','departure_longitude']].drop_duplicates()
#print(query)
dfcsv = retrieveDataFromPostgres(query)
#return json.loads(json.dumps(df1[['departure','departure_uhgs_id','departure_latitude','departure_longitude']].drop_duplicates().to_json(orient='records')))
return formatOutput(dfcsv, 'travels')
@app.route('/api/agg/departures/', methods = ['GET'])
def getDeparturesAgg():
"""
Return the count of departures, for the points located in a 100 km radius neighbourhood from the lat/lon given in parameter
Return the count of departures, for the points located in a radius km neighbourhood from the lat/lon given in parameter
Will be extracted from postgres, schema navigoviz, table built_travels (see navigocorpus/ETL),
but with a filter by default : only source_entry = from and both-from, to avoid duplicates
"""
lat = float(request.args.get("lat"))
lon = float(request.args.get("lon"))
radius = int(request.args.get("radius"))
df2 = calcul_isInside(lat, lon, radius).departure.value_counts().reset_index()
df2.columns = ['departure', 'count']
http://localhost/api/agg/departures/?lat=45.2333&lon=-1&radius=100
"""
lat = None
lon = None
radius = None
try:
lat = float(request.args.get("lat"))
lon = float(request.args.get("lon"))
radius = int(request.args.get("radius"))
except ValueError:
#We go on, silenciously, by selecting all data
pass
#df2 = calcul_isInside(lat, lon, radius).departure.value_counts().reset_index()
#df2.columns = ['departure', 'count']
if lat != None and lon != None and radius!= None :
query = """select departure, count(*) as count from navigoviz.built_travels
where source_entry<> 'both-to' and
st_distance(departure_point, st_setsrid(st_transform(st_setsrid(st_makepoint(%f, %f), 4326), 3857), 3857)) < %f
group by departure""" % (lon,lat,radius*1000)
else :
query = """select departure, count(*)
from navigoviz.built_travels
where source_entry<> 'both-to'
group by departure"""
#print(query)
df2 = retrieveDataFromPostgres(query)
return formatOutput(df2)
return formatOutput(df2, 'travels')
#return json.loads(json.dumps(df2.to_json(orient='records')))
@app.route('/api/agg/destinations/', methods = ['GET'])
def getDestinationsAgg():
"""
Return the count of destinations, for the points located in a 100 km radius neighbourhood from the lat/lon given in parameter
Return the count of destination for each different admiralties,
for the points located in a radius km neighbourhood from the lat/lon given in parameter
Will be extracted from postgres, schema navigoviz, table built_travels (see navigocorpus/ETL),
but with a filter by default : only source_entry = from and both-from, to avoid duplicates
http://localhost/api/agg/destinations/?lat=45.2333&lon=-1&radius=100
"""
lat = float(request.args.get("lat"))
lon = float(request.args.get("lon"))
radius = int(request.args.get("radius"))
df3 = calcul_isInside(lat, lon, radius).destination_amiraute.value_counts().reset_index()
df3.columns = ['label', 'value']
lat = None
lon = None
radius = None
try:
lat = float(request.args.get("lat"))
lon = float(request.args.get("lon"))
radius = int(request.args.get("radius"))
except ValueError:
#We go on, silenciously, by selecting all data
pass
#df3 = calcul_isInside(lat, lon, radius).destination_amiraute.value_counts().reset_index()
#df3.columns = ['label', 'value']
if lat != None and lon != None and radius!= None :
query = """select destination_admiralty as label, count(*) as value from navigoviz.built_travels
where source_entry<> 'both-to'
and st_distance(departure_point, st_setsrid(st_transform(st_setsrid(st_makepoint(%f, %f), 4326), 3857), 3857)) < %f
group by destination_admiralty""" % (lon,lat,radius*1000)
else :
query = """select destination_admiralty as label, count(*) as value
from navigoviz.built_travels
where source_entry<> 'both-to'
group by destination_admiralty"""
#print(query)
df3 = retrieveDataFromPostgres(query)
df3['id'] = df3['label']
return formatOutput(df3)
return formatOutput(df3, 'travels')
#return json.loads(json.dumps(df3.to_json(orient='records')))
......
......@@ -4,4 +4,4 @@
import sys
import os, os.path
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '.')))
from porticwebapi import app as application
from porticapi import app as application
......@@ -2,13 +2,13 @@ API;name;shortname;Exemple;Type;Description
pointcalls;pkid;p00;2;int;Identifiant du pointcall
pointcalls;pointcall;p01;Saint Malo;text;Toponyme du port de départ
pointcalls;pointcall_uhgs_id;p02;A0170819;text;Identifiant géogénéral du port de départ
pointcalls;pointcall_latitude;p03;48.65;float;latitude
pointcalls;pointcall_longitude;p04;-2.016667;float;longitude
pointcalls;latitude;p03;48.65;float;latitude
pointcalls;longitude;p04;-2.016667;float;longitude
pointcalls;pointcall_admiralty;p05;Saint-Malo;text;amirauté d'appartenance du port
pointcalls;pointcall_province;p06;Bretagne;text;province d'appartenance du port
pointcalls;pointcall_states;p07;;json;"états d'appartenance structurés ainsi : un dictionnaire de couple de dates, état d'appartenance. Les 2 dates les plus extrêmes sont 1749 et 1815. Exemple : [{1749-1768 : République de Gênes}, {1768-1814 : France}]"
pointcalls;pointcall_status;p08;siège d'amirauté;text;oblique | siège d'amirauté | rien
pointcalls;pointcall_shiparea;p09;;text;Shipping area telle que calculée par J-P. Dedieu (mer ou ocean et région côtière du point)
pointcalls;shiparea;p09;;text;Shipping area telle que calculée par J-P. Dedieu (mer ou ocean et région côtière du point)
pointcalls;pointcall_point;p10;;text;WKT point en EPSG 3857
pointcalls;pointcall_out_date;p11;;text;Date de sortie telle que écrite dans la source
pointcalls;pointcall_action;p12;;text;In | In-Out | Out | Transit | Loading | Captured | …
......@@ -16,13 +16,13 @@ pointcalls;outdate_fixed;p13;1787-11-10;date;Date transformée si renseignée de
pointcalls;pointcall_in_date;p14;;text;Date d'arrivée telle que écrite dans la source
pointcalls;indate_fixed;p15;;date;Date transformée si renseignée de l'observation à la destination : yyyy-MM-DD
pointcalls;net_route_marker;p16;;text;A ou Z. Si Z, ne pas garder pour composer la route
pointcalls;pointcall_fullrank;p17;;int;Rang de visite d'un port du navire, toutes sources confondues
pointcalls;pointcall_rankfull;p17;;int;Rang de visite d'un port du navire, toutes sources confondues
pointcalls;date_fixed;p18;;date;Date exacte de visite du port si connue (déduite de outdate et indate : coalesce(p.outdate_fixed, p.indate_fixed)
pointcalls;navigo_status;p19;;text;"Passé, ou futur par rapport au point d’observation. Plus intention non realise “J’aurais voulu aller à Malte… mais j’ai été capturé avant » Voir http://navigocorpus.org/iDocuments/Documents/Brief%20manual%20of%20use%20for%20Navigocorpus%20database.pdf "
pointcalls;pointcall_function;p20;;text;"Anciennement, O = point d’observation, T = Terminus (dernier point mentionné dans l’itinéraire futur déclaré), A = premier point mentionné dans l’itinéraire passé. NB : ce champ n’est plus prévu pour la saisie. Le nouveau champ qui remplace est « data_block_leader_marker » et désormais O devient A ; T reste T. A devient I ? AUCUN I vu dans la colonne. Que des O ou des A. Pas de T"
pointcalls;ship_name;s00;Postillon;text;Nom du navire tel que lu dans la source
pointcalls;ship_id;s01;0012023N;text;Codage pour identifier le navire uniquement à travers plusieurs déclarations dans les sources
pointcalls;tonnage;s02;12;float;"Tonnage du navire, noté en réel (virgule pour décimales). 1 tonneau = 24 quintaux, donc les valeurs de Marseille exprimés généralement en quintaux doivent être converties en tonneaux en divisant par 24. Mail Silvia du 29 janvier 2020 : Oui mais En faisant attention que l'unité de mesure exprimée soit le quintal car on a également rapporté à Marseille les tonnages en tonneaux de navires identifiés dans d'autres ports."
pointcalls;tonnage;s02;12;float;"Tonnage du navire, noté en réel (virgule pour décimales). 1 tonneau = 24 quintaux, donc les valeurs de Marseille exprimés généralement en quintaux doivent être converties en tonneaux en divisant par 24, mais en faisant attention que l'unité de mesure exprimée soit le quintal car on a également rapporté à Marseille les tonnages en tonneaux de navires identifiés dans d'autres ports."
pointcalls;tonnage_unit;s03;;text;Unité du tonnage. 46 valeurs différentes. A harmoniser
pointcalls;flag;s04;French;text;Pavillon du navire, tel que lu dans la source --> afficher etat d'appartenance dans la viz (celui du homeport)
pointcalls;class;s05;;text;Catégorie de navire (telle que lue dans la source)
......@@ -99,7 +99,7 @@ travels;destination_admiralty;d05;Quimper;text;amirauté d'appartenance du port
travels;destination_province;d06;Bretagne;text;province d'appartenance du port
travels;destination_states;d07;;json;"états d'appartenance structurés ainsi : un dictionnaire de couple de dates, état d'appartenance. Les 2 dates les plus extrêmes sont 1749 et 1815. Exemple : [{1749-1768 : République de Gênes}, {1768-1814 : France}]"
travels;destination_status;d08;;text;oblique | siège d'amirauté
travels;departure_shiparea;d09;;text;Shipping area telle que calculée par J-P. Dedieu (mer ou ocean et région côtière du point)
travels;destination_shiparea;d09;;text;Shipping area telle que calculée par J-P. Dedieu (mer ou ocean et région côtière du point)
travels;destination_point;d10;;text;WKT point en EPSG 3857
travels;destination_action;d11;;text;In | In-Out | Out | Transit | Loading | Captured | … Si renseignée (Marseille)
travels;destination_in_date;d12;;text;Date d'arrivée telle que écrite dans la source
......@@ -116,7 +116,7 @@ travels;homeport;t10;Binic;text;Port d'attache du navire
travels;homeport_uhgs_id;t11;A0127863;text;Identifiant géogénéral du port d'attache
travels;homeport_latitude;t12;48.6;float;latitude
travels;homeport_longitude;t13;-2.833333;float;longitude
travels;homeport_amiraute;t14;Saint-Brieuc;text;amirauté d'appartenance du port
travels;homeport_admiralty;t14;Saint-Brieuc;text;amirauté d'appartenance du port
travels;homeport_province;t15;Bretagne;text;province d'appartenance du port
travels;homeport_states;t16;;json;"états d'appartenance structurés ainsi : un dictionnaire de couple de dates, état d'appartenance. Les 2 dates les plus extrêmes sont 1749 et 1815. Exemple : [{1749-1768 : République de Gênes}, {1768-1814 : France}]"
travels;homeport_status;t17;;text;oblique | siège d'amirauté
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment