SQLitening Support Forum

Support Forums => READ THIS BEFORE REGISTERING FOR THIS FORUM => Topic started by: allenr on November 05, 2015, 12:01:29 PM

Title: Introduction
Post by: allenr on November 05, 2015, 12:01:29 PM
Hi,

I am a semi-retired business owner who has written applications in support of my plant operations.
I realized a need to upgrade from using comma delimited text data files to something a little more robust.
I looked at various ways to achieve my data handling, but have decided to go with SQLitening because of the supportive people in the PB community.
I hope to add as much as I can along the way.

regards,

Ron
Title: Re: Introduction
Post by: cj on November 05, 2015, 04:21:01 PM
This might give you some ideas

#INCLUDE "sqlitening.inc"
'
FUNCTION PBMAIN () AS LONG     'CSV2Sqlite.bas
  'Demo driver: create a small 3-column CSV file, load it into an
  'SQLite table with Csv2Sqlite, then display the loaded rows.
  LOCAL sCSVFile,sDataBaseName,sTableName AS STRING
  LOCAL DropTable,QuoteNumbers, counter, hFile AS LONG
  sCSVFile = "work1.csv"        'input csv file
  sDataBaseName = "sample.db3"  'sqlite database to write to
  sTableName    = "T1"          'table to write to
  DropTable     = 1             '0=append, 1= start fresh
  QuoteNumbers  = 1             'enclose all columns with $SQ even when numeric value
  '-----------------------------------------------------
  'create sample data file
  hFile = FREEFILE              'ask for a free channel instead of hard-coding #1
  OPEN sCSVFile FOR OUTPUT AS #hFile
  FOR counter=1 TO 30 '3 comma delimited columns
    PRINT #hFile,USING$("#_,&, &",counter,"heidi","klum")
  NEXT
  CLOSE #hFile
  '----------------------------------------------------
  counter = Csv2Sqlite(sCsvFile,sDatabaseName,sTableName,DropTable,QuoteNumbers)
  LOCAL sResults() AS STRING
  slSelAry "select * from " + sTableName,sResults(),"Q9"
  ? JOIN$(sResults(),$CR),,sTableName + " table"
END FUNCTION
'
FUNCTION Csv2SQLite ( _
  sCSVFILE AS STRING, _
  sDataBaseName AS STRING , _
  sTableName    AS STRING,  _
  DropTable     AS LONG,    _
  QuoteNumbers  AS LONG) AS LONG

  'Bulk-load a comma-delimited text file into an SQLite table.
  '  sCSVFile      - input file, one record per line, comma-delimited
  '  sDataBaseName - database file, created if it does not exist ("C" flag)
  '  sTableName    - target table, created if it does not exist
  '  DropTable     - 0 = append to existing table, 1 = drop and recreate
  '  QuoteNumbers  - 1 = single-quote every column, 0 = quote only non-numeric
  'Returns the row count of the table after loading.
  'NOTE(review): the first CSV line is treated as DATA, not as a header;
  'columns are always named C1,C2,...

  LOCAL x, hFile,cols AS LONG
  LOCAL sInputLine,sColumnNames,sOutputLine,s,sInsert AS STRING

  'Pass 1: read the first line only, to discover the column count.
  hFile = FREEFILE
  OPEN sCSVFile FOR INPUT AS #hFile
  LINE INPUT #hFile, sInputLine
  cols = PARSECOUNT(sInputLine)
  CLOSE #hFile

  'Generate column names C1,C2,...
  FOR x = 1 TO cols
    sColumnNames = sColumnNames + "C" + FORMAT$(x) + ","
  NEXT
  sColumnNames = LEFT$(sColumnNames,-1) 'drop trailing comma

  slOpen sDataBaseName,"C"
  IF DropTable THEN slExe "Drop table if exists " + sTableName
  slExe "Create Table if not exists " + sTableName + "(" + sColumnNames + ")"

  'Pass 2: insert every line inside a single transaction
  '(far faster than one implicit transaction per insert).
  slExe "BEGIN EXCLUSIVE"
  hFile = FREEFILE
  OPEN sCSVFile FOR INPUT AS #hFile
  DO UNTIL EOF(#hFile)
    LINE INPUT #hFile, sInputLine
    IF LEN(TRIM$(sInputLine)) THEN      'skip blank lines (e.g. trailing newline)
      RESET sOutputLine
      FOR x = 1 TO cols
        s = PARSE$(sInputLine,x)
        IF QuoteNumbers OR IsNumeric(s) = 0 THEN
          REPLACE $SQ WITH $SQ+$SQ IN s 'SQL-escape embedded quotes by doubling
          s = $SQ & s & $SQ             '(was REPLACE $SQ WITH $DQ, which corrupted data)
        END IF
        sOutputLine = sOutputLine + s + ","
      NEXT
      sOutputLine = LEFT$(sOutputLine,-1) 'drop trailing comma
      sInsert = "Insert into " + sTableName + " values (" + sOutputLine + ")"
      slExe sInsert
    END IF
  LOOP
  slExe "END"
  CLOSE #hFile                          'was never closed in the original (handle leak)

  'Return the number of rows now in the table.
  LOCAL sArray() AS STRING
  slSelAry "select count(*) from " + sTableName,sArray(),"Q9c"
  FUNCTION = VAL(JOIN$(sArray(),$CRLF))
END FUNCTION
'
FUNCTION isNumeric(Answer AS STRING) AS LONG
  'Return -1 (true) if Answer looks like a plain decimal number,
  '0 (false) otherwise. Accepts an optional leading minus sign and at
  'most one decimal point; no exponent notation, no thousands separators.
  Answer = TRIM$(Answer) 'may be required if right-justified text
  IF (LEN(Answer) = 0)                       OR _ 'empty string
     (VERIFY (Answer , ".-0123456789"))      OR _ 'illegal character present
     (TALLY(Answer, ANY "0123456789") = 0)   OR _ 'no digits at all (fixes lone "-" passing)
     (RIGHT$(Answer,1) = ".")                OR _ 'trailing decimal point
     (INSTR(-1,Answer,"-") > 1)              OR _ 'minus sign not in leading position
     (TALLY(Answer,".") > 1) THEN                 'more than one decimal point
     EXIT FUNCTION  'exit, return 0 not-numeric
  END IF
  FUNCTION = -1 'numeric
END FUNCTION
Title: Re: Introduction
Post by: Bern Ertl on November 09, 2015, 10:23:23 AM
Hi Ron, welcome to the forum.  Cheers.