Creating table with the same columns as in a csv
I am writing a stored procedure
which is supposed to take data from a csv
file and insert
into a table
. My problem is that the number of columns
in the csv
file are not fixed(ie number of columns
is variable). So I need some way to create a temporary
table
with exactly the same number of columns
as in the csv
file. So that I can use bulk insert
.
Well, I tried solving the issue by writing a stored procedure which takes the csv file path as a parameter and creates a table named temptable with the same number of columns as the csv. The CSV file looks like
eid,ename,esalary,etemp
1,Mark,1000,
2,Peter,1000,
Stored Proc script
-- Creates table "temptable" whose columns mirror the header row of a csv
-- file, then bulk-loads the csv's data rows into it. All columns are typed
-- varchar(100).
-- @path: full file-system path to the csv file (visible to the SQL Server
--        service account — BULK INSERT reads server-side, not client-side).
create proc createtable
@path nvarchar(255) -- widened from nvarchar(50): real paths routinely exceed 50 chars (backward-compatible)
as
begin
declare @execSQL nvarchar(max)
declare @tempstr varchar(max)
declare @col varchar(max)
declare @table nvarchar(max)
declare @safepath nvarchar(510)
-- BULK INSERT cannot take bound parameters, so the statement must be built
-- by concatenation; double up single quotes so an embedded quote in the
-- path cannot break out of the string literal.
set @safepath = replace(@path, '''', '''''')
-- Staging table with one wide column to hold the raw header line of the csv.
CREATE TABLE #tbl (line VARCHAR(max))
-- Read only the first (header) row; the field terminator equals the row
-- terminator, so the entire line lands in the single "line" column.
SET @execSQL =
'BULK INSERT #tbl
FROM ''' + @safepath + '''
WITH (
FIELDTERMINATOR =''\n'',
FIRSTROW = 1,
ROWTERMINATOR = ''\n'',
LASTROW = 1
)
'
EXEC sp_executesql @stmt=@execSQL
SET @col = ''
-- Normalize tab-separated headers to commas before splitting.
SET @tempstr = (SELECT TOP 1 RTRIM(REPLACE(Line, CHAR(9), ',')) FROM #tbl)
DROP TABLE #tbl
-- Split the header on commas, building the column list. quotename()
-- brackets each name so headers containing spaces, reserved words, or
-- bracket characters still produce valid DDL.
WHILE CHARINDEX(',',@tempstr) > 0
BEGIN
SET @col=@col + QUOTENAME(LTRIM(RTRIM(SUBSTRING(@tempstr, 1, CHARINDEX(',',@tempstr)-1)))) + ' varchar(100),'
SET @tempstr = SUBSTRING(@tempstr, CHARINDEX(',',@tempstr)+1, len(@tempstr))
END
-- Last (or only) column: no trailing comma.
SET @col = @col + QUOTENAME(LTRIM(RTRIM(@tempstr))) + ' varchar(100)'
-- Recreate the target table from scratch on every run.
if object_id('temptable') is not null
drop table temptable
SET @table = 'create table temptable (' + @col + ')'
EXEC sp_executesql @stmt=@table
-- Load the data rows (FIRSTROW = 2 skips the header) into the new table.
SET @execSQL =
'BULK INSERT temptable
FROM ''' + @safepath + '''
WITH (
FIELDTERMINATOR ='','',
FIRSTROW = 2,
ROWTERMINATOR = ''\n''
)
'
EXEC sp_executesql @stmt=@execSQL
end
Improved Nadeem's script — a little more robust.
This code is excellent for loading multiple CSV files without using the default wizards.
-- Batch-level SET options captured with the procedure at CREATE time:
-- ANSI_NULLS ON makes comparisons with NULL yield UNKNOWN (ANSI behavior);
-- QUOTED_IDENTIFIER ON makes double-quoted strings parse as identifiers.
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- Creates (or recreates) a table named @new_table_name whose columns mirror
-- the header row of a delimited file, then bulk-loads the file's data rows
-- into it. All columns are typed varchar(max).
-- @path:             server-side path to the file (read by the SQL Server
--                    service account via BULK INSERT).
-- @new_table_name:   target table name; dropped first if it already exists.
-- @field_terminator: delimiter between fields in the file.
-- @row_terminator:   delimiter between rows in the file.
CREATE proc [dbo].[importeer_csv_as_table]
@path nvarchar(255),
@new_table_name varchar(255),
@field_terminator varchar(255),
@row_terminator varchar(255)
as
begin
declare @execsql nvarchar(max)
declare @tempstr varchar(max)
declare @col varchar(max)
declare @table nvarchar(max)
-- BUG FIX: was varchar(max); sp_executesql requires an nvarchar @stmt, so
-- the drop branch raised an error whenever the target table already existed.
declare @drop_table nvarchar(max)
declare @safe_path nvarchar(510)
declare @quoted_table nvarchar(520)
-- BULK INSERT cannot take bound parameters, so the dynamic SQL is built by
-- concatenation. Escape embedded single quotes and bracket the identifier
-- so caller-supplied values cannot break out of the statement.
set @safe_path = replace(@path, '''', '''''')
set @quoted_table = quotename(@new_table_name)
-- Staging table with one wide column to hold the raw header line
-- (was varchar(1000), which could truncate wide headers).
create table #tbl (line varchar(max))
-- Read only the header row; using the ROW terminator as the FIELD
-- terminator makes the whole line land in the single "line" column.
set @execsql =
'bulk insert #tbl
from ''' + @safe_path + '''
with (
fieldterminator =''' + replace(@row_terminator, '''', '''''') + ''',
firstrow = 1,
rowterminator = ''' + replace(@row_terminator, '''', '''''') + ''',
lastrow = 1
)
'
exec sp_executesql @stmt=@execsql
-- Normalize the caller's field terminator to a comma for splitting.
update #tbl set line = replace(line, @field_terminator, ',')
set @col = ''
-- Also map stray tabs to commas, matching the original behavior.
set @tempstr = (select top 1 rtrim(replace(line, char(9), ',')) from #tbl)
drop table #tbl
-- Split the header on commas; quotename() brackets each column name so
-- spaces, reserved words, or ']' characters still yield valid DDL
-- (manual '[' + name + ']' broke on names containing ']').
while charindex(',',@tempstr) > 0
begin
set @col=@col + quotename(ltrim(rtrim(substring(@tempstr, 1, charindex(',',@tempstr)-1)))) + ' varchar(max),'
set @tempstr = substring(@tempstr, charindex(',',@tempstr)+1, len(@tempstr))
end
-- Last (or only) column; trimmed for consistency with the loop above.
set @col = @col + quotename(ltrim(rtrim(@tempstr))) + ' varchar(max)'
-- Recreate the target table from scratch.
if object_id(@new_table_name) is not null
begin
set @drop_table = 'drop table ' + @quoted_table
exec sp_executesql @stmt= @drop_table
end
set @table = 'create table ' + @quoted_table + ' (' + @col + ')'
--select @table
exec sp_executesql @stmt=@table
-- Load the data rows (firstrow = 2 skips the header).
set @execsql =
'bulk insert ' + @quoted_table + '
from ''' + @safe_path + '''
with (
fieldterminator =''' + replace(@field_terminator, '''', '''''') + ''',
firstrow = 2,
rowterminator = ''' + replace(@row_terminator, '''', '''''') + '''
)
'
exec sp_executesql @stmt=@execsql
end
GO
You could use Powershell to process the CSV file, there is an example here which you could probably adapt to take account of the variable number of fields. You can build the SQL to create a table and then issue a bulk load.
Featured comments