Updated on 2025-09-04 GMT+08:00

Preparations

Create tables in the current database and insert data at the million-row scale.

Machine configuration: 8-core CPU; 32 GB of memory

-- Create the test_table table.
gaussdb=# CREATE TABLE test_table (
    id SERIAL PRIMARY KEY,    -- auto-incrementing surrogate key
    name VARCHAR(100),
    email VARCHAR(100),
    created_at TIMESTAMP
);
CREATE TABLE

-- Insert a million data records into the table.
-- Each row gets a synthetic name/email derived from the series value i, and a
-- created_at timestamp i minutes in the past, so the rows span about two years.
gaussdb=# INSERT INTO test_table (name, email, created_at)
SELECT
    'User_' || i,
    'User_' || i || '@example.com',
    NOW() - (i * INTERVAL '1 minute')
FROM generate_series(1, 1000000) AS i;
INSERT 0 1000000

-- Create the sales_records table.
gaussdb=# CREATE TABLE sales_records (
    record_id BIGSERIAL PRIMARY KEY,
    region_id INT NOT NULL,
    store_id INT NOT NULL,
    product_id INT NOT NULL,
    sale_date DATE NOT NULL,
    amount DECIMAL(12,2) NOT NULL,    -- exact decimal type for monetary values
    is_refund BOOLEAN DEFAULT false
);
CREATE TABLE

-- Insert 2 million data records.
gaussdb=# INSERT INTO sales_records (region_id, store_id, product_id, sale_date, amount)
SELECT
    (random()*9)::INT + 1,                -- region_id, roughly 1..10
    (random()*99)::INT + 1,               -- store_id, roughly 1..100
    (random()*499)::INT + 1,              -- product_id, roughly 1..500
    current_date - (random()*1095)::INT,  -- sale_date within the last ~3 years
    (random()*9900)::DECIMAL + 100        -- amount between 100 and 10000
FROM generate_series(1, 2000000);
INSERT 0 2000000