Skip to content

Commit 7ecbdc7

Browse files
committed
test: seed posts from a CSV fixture with a header row
1 parent 870ab4d commit 7ecbdc7

2 files changed

Lines changed: 86 additions & 0 deletions

File tree

Lines changed: 83 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,83 @@
1+
process.env.LOG_SCOPE = 'pgsql-test';
2+
3+
import fs from 'fs';
4+
import path from 'path';
5+
6+
import { seed } from '../src';
7+
import { getConnections } from '../src/connect';
8+
import { exportTableToCsv } from '../src/seed/csv';
9+
import { PgTestClient } from '../src/test-client';
10+
11+
const csv = (file: string) => path.resolve(__dirname, '../csv', file);
12+
13+
let pg: PgTestClient;
14+
let teardown: () => Promise<void>;
15+
16+
beforeAll(async () => {
17+
({ pg, teardown } = await getConnections({}, [
18+
// 1. Create schema with SERIAL primary keys
19+
seed.fn(async ({ pg }) => {
20+
await pg.query(`
21+
CREATE SCHEMA custom;
22+
CREATE TABLE custom.users (
23+
id SERIAL PRIMARY KEY,
24+
name TEXT NOT NULL
25+
);
26+
27+
CREATE TABLE custom.posts (
28+
id SERIAL PRIMARY KEY,
29+
user_id INT REFERENCES custom.users(id),
30+
content TEXT NOT NULL
31+
);
32+
`);
33+
}),
34+
35+
// 2. Seed from CSV using COPY FROM STDIN
36+
seed.csv({
37+
'custom.users': csv('users.csv'),
38+
'custom.posts': csv('posts-subset-header.csv')
39+
}),
40+
41+
// 3. Fix SERIAL sequences to match the highest used ID
42+
seed.fn(async ({ pg }) => {
43+
await pg.query(`SELECT setval(pg_get_serial_sequence('custom.users', 'id'), (SELECT MAX(id) FROM custom.users));`);
44+
await pg.query(`SELECT setval(pg_get_serial_sequence('custom.posts', 'id'), (SELECT MAX(id) FROM custom.posts));`);
45+
})
46+
]));
47+
});
48+
49+
afterAll(async () => {
50+
await teardown();
51+
});
52+
53+
it('csv in/out', async () => {
54+
// 4. Insert new data without specifying IDs (uses SERIAL)
55+
await pg.query(`
56+
INSERT INTO custom.users (name) VALUES ('Carol');
57+
INSERT INTO custom.posts (user_id, content) VALUES (3, 'Carol''s first post');
58+
`);
59+
60+
// 5. Validate full contents
61+
const res = await pg.query(`
62+
SELECT custom.users.name, custom.posts.content
63+
FROM custom.posts
64+
JOIN custom.users ON custom.users.id = custom.posts.user_id
65+
ORDER BY custom.users.id
66+
`);
67+
68+
expect(res.rows).toEqual([
69+
{ name: 'Alice', content: 'Hello world!' },
70+
{ name: 'Bob', content: 'Graphile is cool!' },
71+
{ name: 'Carol', content: "Carol's first post" }
72+
]);
73+
74+
// 6. Ensure output directory exists
75+
const outDir = path.resolve(__dirname, '../output');
76+
fs.mkdirSync(outDir, { recursive: true });
77+
78+
// 7. Export updated tables to CSV
79+
await exportTableToCsv(pg, 'custom.users', path.join(outDir, 'users.csv'));
80+
await exportTableToCsv(pg, 'custom.posts', path.join(outDir, 'posts.csv'));
81+
82+
console.log(`📤 Exported users and posts to ${outDir}`);
83+
});
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
id,content,user_id
1,Hello world!,1
2,Graphile is cool!,2

0 commit comments

Comments (0)