@@ -44,9 +44,6 @@ const {
4444 'lower-case-lang-codes' : {
4545 type : 'boolean' ,
4646 } ,
47- 'stops-location-index' : {
48- type : 'boolean' ,
49- } ,
5047 'stats-by-route-date' : {
5148 type : 'string' ,
5249 } ,
@@ -59,21 +56,6 @@ const {
5956 'schema' : {
6057 type : 'string' ,
6158 } ,
62- 'postgraphile' : {
63- type : 'boolean' ,
64- } ,
65- 'postgraphile-password' : {
66- type : 'string' ,
67- } ,
68- 'postgrest' : {
69- type : 'boolean' ,
70- } ,
71- 'postgrest-password' : {
72- type : 'string' ,
73- } ,
74- 'postgrest-query-cost-limit' : {
75- type : 'string' ,
76- } ,
7759 'import-metadata' : {
7860 type : 'boolean' ,
7961 }
@@ -84,7 +66,7 @@ const {
8466if ( flags . help ) {
8567 process . stdout . write ( `
8668Usage:
87- gtfs-to-sql [options] [--] <gtfs-file> ...
69+ import-gtfs-into-duckdb [options] [--] <path-to-duckdb> <gtfs-file> ...
8870Options:
8971 --silent -s Don't show files being converted.
9072 --require-dependencies -d Require files that the specified GTFS files depend
@@ -102,8 +84,6 @@ Options:
10284 --routes-without-agency-id Don't require routes.txt items to have an agency_id.
10385 --stops-without-level-id Don't require stops.txt items to have a level_id.
10486 Default if levels.txt has not been provided.
105- --stops-location-index Create a spatial index on stops.stop_loc for efficient
106- queries by geolocation.
10787 --lower-case-lang-codes Accept Language Codes (e.g. in feed_info.feed_lang)
10888 with a different casing than the official BCP-47
10989 language tags (as specified by the GTFS spec),
@@ -124,34 +104,18 @@ Options:
124104 currently running trips over time, by hour.
125105 Like --stats-by-route-date, this flag accepts
126106 none, view & materialized-view.
127- --schema The schema to use for the database. Default: public
128- Even when importing into a schema other than \`public\`,
129- a function \`public.gtfs_via_postgres_import_version()\`
130- gets created, to ensure that multiple imports into the
131- same database are all made using the same version. See
132- also multiple-datasets.md in the docs.
133- --postgraphile Tweak generated SQL for PostGraphile usage.
134- https://www.graphile.org/postgraphile/
135- --postgraphile-password Password for the PostGraphile PostgreSQL user.
136- Default: $POSTGRAPHILE_PGPASSWORD, fallback random.
137- --postgrest Tweak generated SQL for PostgREST usage.
138- Please combine it with --schema.
139- https://postgrest.org/
140- --postgrest-password Password for the PostgREST PostgreSQL user \`web_anon\`.
141- Default: $POSTGREST_PGPASSWORD, fallback random.
142- --postgrest-query-cost-limit Define a cost limit [1] for queries executed by PostgREST
143- on behalf of a user. It is only enforced if
144- pg_plan_filter [2] is installed in the database!
145- Must be a positive float. Default: none
146- [1] https://www.postgresql.org/docs/14/using-explain.html
147- [2] https://github.com/pgexperts/pg_plan_filter
107+ --schema The schema to use for the database. Default: main
108+ May not contain \`.\`.
148109 --import-metadata Create functions returning import metadata:
149110 - gtfs_data_imported_at (timestamp with time zone)
150111 - gtfs_via_postgres_version (text)
151112 - gtfs_via_postgres_options (jsonb)
113+ Notes:
114+ If you just want to check if the GTFS data can be imported but don't care about the
115+ resulting DuckDB database file, you can import into an in-memory database by specifying
116+ \`:memory:\` as the <path-to-duckdb>.
152117Examples:
153- gtfs-to-sql some-gtfs/*.txt | sponge | psql -b # import into PostgreSQL
154- gtfs-to-sql -u -- some-gtfs/*.txt | gzip >gtfs.sql.gz # generate a gzipped SQL dump
118+ import-gtfs-into-duckdb some-gtfs.duckdb some-gtfs/*.txt
155119
156120[1] https://developers.google.com/transit/gtfs/reference/extended-route-types
157121[2] https://groups.google.com/g/gtfs-changes/c/keT5rTPS7Y0/m/71uMz2l6ke0J
@@ -165,11 +129,11 @@ if (flags.version) {
165129}
166130
167131const { basename, extname} = require ( 'path' )
168- const { pipeline} = require ( 'stream' )
169132const convertGtfsToSql = require ( './index' )
170- const DataError = require ( './lib/data-error' )
171133
172- const files = args . map ( ( file ) => {
134+ const [ pathToDb ] = args
135+
136+ const files = args . slice ( 1 ) . map ( ( file ) => {
173137 const name = basename ( file , extname ( file ) )
174138 return { name, file}
175139} )
@@ -185,9 +149,7 @@ const opt = {
185149 statsByRouteIdAndDate : flags [ 'stats-by-route-date' ] || 'none' ,
186150 statsByAgencyIdAndRouteIdAndStopAndHour : flags [ 'stats-by-agency-route-stop-hour' ] || 'none' ,
187151 statsActiveTripsByHour : flags [ 'stats-active-trips-by-hour' ] || 'none' ,
188- schema : flags [ 'schema' ] || 'public' ,
189- postgraphile : ! ! flags . postgraphile ,
190- postgrest : ! ! flags . postgrest ,
152+ schema : flags [ 'schema' ] || 'main' ,
191153 importMetadata : ! ! flags [ 'import-metadata' ] ,
192154}
193155if ( 'stops-without-level-id' in flags ) {
@@ -196,31 +158,11 @@ if ('stops-without-level-id' in flags) {
196158if ( 'lower-case-lang-codes' in flags ) {
197159 opt . lowerCaseLanguageCodes = flags [ 'lower-case-lang-codes' ]
198160}
199- if ( 'postgraphile-password' in flags ) {
200- opt . postgraphilePassword = flags [ 'postgraphile-password' ]
201- }
202- if ( 'postgrest-password' in flags ) {
203- opt . postgrestPassword = flags [ 'postgrest-password' ]
204- }
205- if ( 'postgrest-query-cost-limit' in flags ) {
206- const limit = parseFloat ( flags [ 'postgrest-query-cost-limit' ] )
207- if ( ! Number . isFinite ( limit ) || limit < 0 ) {
208- console . error ( 'Invalid --postgrest-query-cost-limit value.' )
209- process . exit ( 1 )
210- }
211- opt . lowerCaseLanguageCodes = limit
212- }
213161
214- pipeline (
215- convertGtfsToSql ( files , opt ) ,
216- process . stdout ,
217- ( err ) => {
218- if ( ! err ) return ;
219- if ( err instanceof DataError ) {
220- console . error ( String ( err ) )
221- } else if ( err . code !== 'EPIPE' ) {
222- console . error ( err )
223- }
224- process . exit ( 1 )
162+ convertGtfsToSql ( pathToDb , files , opt )
163+ . catch ( ( err ) => {
164+ if ( err . code !== 'EPIPE' ) { // todo: check still necessary? we don't pipe anymore
165+ console . error ( err )
225166 }
226- )
167+ process . exit ( 1 )
168+ } )