@@ -44,9 +44,6 @@ const {
4444 'lower-case-lang-codes' : {
4545 type : 'boolean' ,
4646 } ,
47- 'stops-location-index' : {
48- type : 'boolean' ,
49- } ,
5047 'stats-by-route-date' : {
5148 type : 'string' ,
5249 } ,
@@ -59,21 +56,6 @@ const {
5956 'schema' : {
6057 type : 'string' ,
6158 } ,
62- 'postgraphile' : {
63- type : 'boolean' ,
64- } ,
65- 'postgraphile-password' : {
66- type : 'string' ,
67- } ,
68- 'postgrest' : {
69- type : 'boolean' ,
70- } ,
71- 'postgrest-password' : {
72- type : 'string' ,
73- } ,
74- 'postgrest-query-cost-limit' : {
75- type : 'string' ,
76- } ,
7759 'import-metadata' : {
7860 type : 'boolean' ,
7961 }
@@ -84,7 +66,7 @@ const {
8466if ( flags . help ) {
8567 process . stdout . write ( `
8668Usage:
87- gtfs-to-sql [options] [--] <gtfs-file> ...
69+ import-gtfs-into-duckdb [options] [--] <path-to-duckdb> <gtfs-file> ...
8870Options:
8971 --silent -s Don't show files being converted.
9072 --require-dependencies -d Require files that the specified GTFS files depend
@@ -101,8 +83,6 @@ Options:
10183 --routes-without-agency-id Don't require routes.txt items to have an agency_id.
10284 --stops-without-level-id Don't require stops.txt items to have a level_id.
10385 Default if levels.txt has not been provided.
104- --stops-location-index Create a spatial index on stops.stop_loc for efficient
105- queries by geolocation.
10686 --lower-case-lang-codes Accept Language Codes (e.g. in feed_info.feed_lang)
10787 with a different casing than the official BCP-47
10888 language tags (as specified by the GTFS spec),
@@ -123,34 +103,18 @@ Options:
123103 currently running trips over time, by hour.
124104 Like --stats-by-route-date, this flag accepts
125105 none, view & materialized-view.
126- --schema The schema to use for the database. Default: public
127- Even when importing into a schema other than \`public\`,
128- a function \`public.gtfs_via_postgres_import_version()\`
129- gets created, to ensure that multiple imports into the
130- same database are all made using the same version. See
131- also multiple-datasets.md in the docs.
132- --postgraphile Tweak generated SQL for PostGraphile usage.
133- https://www.graphile.org/postgraphile/
134- --postgraphile-password Password for the PostGraphile PostgreSQL user.
135- Default: $POSTGRAPHILE_PGPASSWORD, fallback random.
136- --postgrest Tweak generated SQL for PostgREST usage.
137- Please combine it with --schema.
138- https://postgrest.org/
139- --postgrest-password Password for the PostgREST PostgreSQL user \`web_anon\`.
140- Default: $POSTGREST_PGPASSWORD, fallback random.
141- --postgrest-query-cost-limit Define a cost limit [1] for queries executed by PostgREST
142- on behalf of a user. It is only enforced if
143- pg_plan_filter [2] is installed in the database!
144- Must be a positive float. Default: none
145- [1] https://www.postgresql.org/docs/14/using-explain.html
146- [2] https://github.com/pgexperts/pg_plan_filter
106+ --schema The schema to use for the database. Default: main
107+ May not contain \`.\`.
147108 --import-metadata Create functions returning import metadata:
148109 - gtfs_data_imported_at (timestamp with time zone)
149110 - gtfs_via_postgres_version (text)
150111 - gtfs_via_postgres_options (jsonb)
112+ Notes:
113+ If you just want to check if the GTFS data can be imported but don't care about the
114+ resulting DuckDB database file, you can import into an in-memory database by specifying
115+ \`:memory:\` as the <path-to-duckdb>.
151116Examples:
152- gtfs-to-sql some-gtfs/*.txt | sponge | psql -b # import into PostgreSQL
153- gtfs-to-sql -u -- some-gtfs/*.txt | gzip >gtfs.sql.gz # generate a gzipped SQL dump
117+ import-gtfs-into-duckdb some-gtfs.duckdb some-gtfs/*.txt
154118
155119[1] https://developers.google.com/transit/gtfs/reference/extended-route-types
156120[2] https://groups.google.com/g/gtfs-changes/c/keT5rTPS7Y0/m/71uMz2l6ke0J
@@ -164,11 +128,11 @@ if (flags.version) {
164128}
165129
166130const { basename, extname} = require ( 'path' )
167- const { pipeline} = require ( 'stream' )
168131const convertGtfsToSql = require ( './index' )
169- const DataError = require ( './lib/data-error' )
170132
171- const files = args . map ( ( file ) => {
133+ const [ pathToDb ] = args
134+
135+ const files = args . slice ( 1 ) . map ( ( file ) => {
172136 const name = basename ( file , extname ( file ) )
173137 return { name, file}
174138} )
@@ -184,9 +148,7 @@ const opt = {
184148 statsByRouteIdAndDate : flags [ 'stats-by-route-date' ] || 'none' ,
185149 statsByAgencyIdAndRouteIdAndStopAndHour : flags [ 'stats-by-agency-route-stop-hour' ] || 'none' ,
186150 statsActiveTripsByHour : flags [ 'stats-active-trips-by-hour' ] || 'none' ,
187- schema : flags [ 'schema' ] || 'public' ,
188- postgraphile : ! ! flags . postgraphile ,
189- postgrest : ! ! flags . postgrest ,
151+ schema : flags [ 'schema' ] || 'main' ,
190152 importMetadata : ! ! flags [ 'import-metadata' ] ,
191153}
192154if ( 'stops-without-level-id' in flags ) {
@@ -195,31 +157,11 @@ if ('stops-without-level-id' in flags) {
195157if ( 'lower-case-lang-codes' in flags ) {
196158 opt . lowerCaseLanguageCodes = flags [ 'lower-case-lang-codes' ]
197159}
198- if ( 'postgraphile-password' in flags ) {
199- opt . postgraphilePassword = flags [ 'postgraphile-password' ]
200- }
201- if ( 'postgrest-password' in flags ) {
202- opt . postgrestPassword = flags [ 'postgrest-password' ]
203- }
204- if ( 'postgrest-query-cost-limit' in flags ) {
205- const limit = parseFloat ( flags [ 'postgrest-query-cost-limit' ] )
206- if ( ! Number . isFinite ( limit ) || limit < 0 ) {
207- console . error ( 'Invalid --postgrest-query-cost-limit value.' )
208- process . exit ( 1 )
209- }
210- opt . lowerCaseLanguageCodes = limit
211- }
212160
213- pipeline (
214- convertGtfsToSql ( files , opt ) ,
215- process . stdout ,
216- ( err ) => {
217- if ( ! err ) return ;
218- if ( err instanceof DataError ) {
219- console . error ( String ( err ) )
220- } else if ( err . code !== 'EPIPE' ) {
221- console . error ( err )
222- }
223- process . exit ( 1 )
161+ convertGtfsToSql ( pathToDb , files , opt )
162+ . catch ( ( err ) => {
163+ if ( err . code !== 'EPIPE' ) { // todo: check still necessary? we don't pipe anymore
164+ console . error ( err )
224165 }
225- )
166+ process . exit ( 1 )
167+ } )