Sure thing :) Check out the custom socket option https://github.com/porsager/postgres?tab=readme-ov-file#custom-socket
```js
import postgres from 'postgres'
import { Connector, IpAddressTypes } from '@google-cloud/cloud-sql-connector'

const sql = postgres({
  database: process.env.POPUPS_DB_DATABASE,
  username: process.env.POPUPS_DB_USERNAME,
  password: process.env.POPUPS_DB_PASSWORD,
  socket: async () => {
    const connector = new Connector()
    const { stream } = await connector.getOptions({
      instanceConnectionName: process.env.POPUPS_DB_HOSTS,
      ipType: IpAddressTypes.PUBLIC,
    })
    return stream
  },
  connection: {
    statement_timeout: 5000,
  }
})

export default sql
```
You want to do it this way to ensure reconnection can work too.
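For reference, a minimal usage sketch of the exported `sql` instance; the `./db.js` path and the `popups` table name are assumptions for illustration, not from this thread:

```js
// Hypothetical usage of the exported `sql` instance from the snippet above.
import sql from './db.js'

// Each new connection in the pool will call the socket function and
// get a fresh Cloud SQL stream, which is what makes reconnection work.
const rows = await sql`select id, name from popups limit 10`
console.log(rows)
```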
Amazing, that works like a charm! Thank you so much :clap:
Note that it works with:
```js
const { stream } = await connector.getOptions({
  instanceConnectionName: host,
  ipType: IpAddressTypes.PUBLIC,
});

const connectOptions = {
  socket: stream
};
```
But this doesn't:
```js
const connectOptions = {
  socket: async () => {
    const { stream } = await connector.getOptions({
      instanceConnectionName: host,
      ipType: IpAddressTypes.PUBLIC,
    });
    return stream;
  }
};
```
I can't really say why. The error:
```
FATAL (218): unhandledRejection
promise: {}
reason: {}
```
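One way to dig further (just a sketch, reusing the same `connector` and `host` from the snippets above) is to catch and log inside the socket callback, so the real failure isn't swallowed as an opaque unhandledRejection:

```js
const connectOptions = {
  socket: async () => {
    try {
      const { stream } = await connector.getOptions({
        instanceConnectionName: host,
        ipType: IpAddressTypes.PUBLIC,
      });
      return stream;
    } catch (err) {
      // Surface the underlying connector error before rethrowing,
      // instead of letting it bubble up with an empty `reason`.
      console.error('Cloud SQL connector failed:', err);
      throw err;
    }
  },
};
```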
I'm trying to connect postgres.js to my GCP PostgreSQL instance, through cloud-sql-nodejs-connector, with no luck (see the Google documentation). The logged `connectOptions` looks like this:

Connection error details
```
ERROR (358): err: {
  "type": "AggregateError",
  "message": "",
  "stack": AggregateError:
      at internalConnectMultiple (node:net:1116:18)
      at afterConnectMultiple (node:net:1683:7)
      at TCPConnectWrap.callbackTrampoline (node:internal/async_hooks:130:17)
      at cachedError (file:///code/node_modules/postgres/src/query.js:170:23)
      at new Query (file:///code/node_modules/postgres/src/query.js:36:24)
      at sql (file:///code/node_modules/postgres/src/index.js:112:11)
      at sql (file:///code/src/repositories/popups.ts:18:15)
  "aggregateErrors": [
    {
      "type": "Error",
      "message": "connect ECONNREFUSED ::1:5432",
      "stack": Error: connect ECONNREFUSED ::1:5432
          at createConnectionError (node:net:1646:14)
          at afterConnectMultiple (node:net:1676:16)
          at TCPConnectWrap.callbackTrampoline (node:internal/async_hooks:130:17)
      "errno": -111,
      "code": "ECONNREFUSED",
      "syscall": "connect",
      "address": "::1",
      "port": 5432
    },
    {
      "type": "Error",
      "message": "connect ECONNREFUSED 127.0.0.1:5432",
      "stack": Error: connect ECONNREFUSED 127.0.0.1:5432
          at createConnectionError (node:net:1646:14)
          at afterConnectMultiple (node:net:1676:16)
          at TCPConnectWrap.callbackTrampoline (node:internal/async_hooks:130:17)
      "errno": -111,
      "code": "ECONNREFUSED",
      "syscall": "connect",
      "address": "127.0.0.1",
      "port": 5432
    }
  ],
  "code": "ECONNREFUSED"
}
```

sql.options
```
{
  host: [ 'localhost' ],
  port: [ 5432 ],
  path: false,
  database: 'wisepops',
  user: 'wisepops',
  pass: 'MASKED',
  max: 10,
  ssl: false,
  idle_timeout: null,
  connect_timeout: 30,
  max_lifetime: [Function: max_lifetime],
  max_pipeline: 100,
  backoff: [Function: backoff],
  keep_alive: 60,
  prepare: true,
  debug: false,
  fetch_types: true,
  publications: 'alltables',
  target_session_attrs: undefined,
  connection: { application_name: 'postgres.js', statement_timeout: 5000 },
  types: { bigint: { to: 20, from: [Array], parse: [Function: parse], serialize: [Function: serialize] } },
  onnotice: undefined,
  onnotify: undefined,
  onclose: undefined,
  onparameter: undefined,
  socket: undefined,
  transform: {
    undefined: undefined,
    column: { from: undefined, to: undefined },
    value: { from: undefined, to: undefined },
    row: { from: undefined, to: undefined }
  },
  parameters: {},
  shared: { retries: 0, typeArrayMap: {} },
  serializers: {
    '0': [Function: serialize], '16': [Function: serialize], '17': [Function: serialize],
    '20': [Function: serialize], '21': [Function: serialize], '23': [Function: serialize],
    '25': [Function: serialize], '26': [Function: serialize], '114': [Function: serialize],
    '700': [Function: serialize], '701': [Function: serialize], '1082': [Function: serialize],
    '1114': [Function: serialize], '1184': [Function: serialize], '3802': [Function: serialize]
  },
  parsers: {
    '16': [Function: parse], '17': [Function: parse], '20': [Function: parse],
    '21': [Function: parse], '23': [Function: parse], '26': [Function: parse],
    '114': [Function: parse], '700': [Function: parse], '701': [Function: parse],
    '1082': [Function: parse], '1114': [Function: parse], '1184': [Function: parse],
    '3802': [Function: parse]
  }
}
```

Working example with `pg`
```ts
import pg from 'pg';
import { Connector, IpAddressTypes } from "@google-cloud/cloud-sql-connector";

const { Pool } = pg;

export const connect = async () => {
  const connector = new Connector();
  const connectOptions = await connector.getOptions({
    instanceConnectionName: process.env.POPUPS_DB_HOSTS,
    ipType: IpAddressTypes.PUBLIC,
  });
  console.log(connectOptions);
  const pool = new Pool({
    ...connectOptions,
    database: process.env.POPUPS_DB_DATABASE,
    user: process.env.POPUPS_DB_USERNAME,
    password: process.env.POPUPS_DB_PASSWORD,
    max: 5,
  });
  console.log(await pool.query('SELECT 1'));
};
```

Is this something that postgres.js can handle? Many thanks!