I made it parse DirectX DDS and RTSP, so it does do full protocol and algorithm work if you stage the questions right. Here are some random tests for PureBasic (PB).
Code: Select all
; --- CCTV motion detection --------------------------------------------------
; Walks a directory of JPEG frames in filename order and reports every frame
; whose pixel content differs noticeably from the previous frame.
;
; Fixes vs. the original draft:
;   * Dir$() / NumberOfElements() / GetPixel() are not PureBasic; listing is
;     done with ExamineDirectory()/NextDirectoryEntry(), pixel access with
;     StartDrawing()/Point().
;   * The JPEG decoder must be registered before LoadImage() can read .jpg.
;   * Images are freed after use (the original leaked one image per frame).
;   * "Motion" now means a fraction of changed pixels instead of a single
;     pixel, and the per-pixel test compares colour channels instead of a
;     packed RGB integer (where the float threshold 0.1 was meaningless).

UseJPEGImageDecoder()

#DirHandle = 0

Define imagePath$       = "C:\path\to\images\"
Define pixelDelta       = 30    ; per-channel difference that marks a pixel "changed"
Define motionFraction.d = 0.01  ; report when more than 1% of the pixels changed

; Count the pixels of two same-size images whose R, G or B channel differs
; by more than delta. Returns -1 when the image sizes do not match.
Procedure CountChangedPixels(imgA, imgB, delta)
  Protected w = ImageWidth(imgA)
  Protected h = ImageHeight(imgA)
  Protected x, y, p, q, changed
  If w <> ImageWidth(imgB) Or h <> ImageHeight(imgB)
    ProcedureReturn -1
  EndIf
  ; Only one drawing output can be active at a time, so buffer image A first.
  Dim pixA(w * h - 1)
  StartDrawing(ImageOutput(imgA))
  For y = 0 To h - 1
    For x = 0 To w - 1
      pixA(y * w + x) = Point(x, y)
    Next
  Next
  StopDrawing()
  StartDrawing(ImageOutput(imgB))
  For y = 0 To h - 1
    For x = 0 To w - 1
      p = pixA(y * w + x)
      q = Point(x, y)
      If Abs(Red(p) - Red(q)) > delta Or Abs(Green(p) - Green(q)) > delta Or Abs(Blue(p) - Blue(q)) > delta
        changed + 1
      EndIf
    Next
  Next
  StopDrawing()
  ProcedureReturn changed
EndProcedure

; Collect the .jpg filenames, sorted so frames are compared chronologically
; (assumes filenames sort in capture order - TODO confirm the naming scheme).
NewList frameFiles$()
If ExamineDirectory(#DirHandle, imagePath$, "*.jpg")
  While NextDirectoryEntry(#DirHandle)
    If DirectoryEntryType(#DirHandle) = #PB_DirectoryEntry_File
      AddElement(frameFiles$())
      frameFiles$() = DirectoryEntryName(#DirHandle)
    EndIf
  Wend
  FinishDirectory(#DirHandle)
EndIf
SortList(frameFiles$(), #PB_Sort_Ascending)

Define prevImage = 0
Define currImage, changed, totalPixels

ForEach frameFiles$()
  currImage = LoadImage(#PB_Any, imagePath$ + frameFiles$())
  If currImage
    If prevImage
      changed = CountChangedPixels(prevImage, currImage, pixelDelta)
      totalPixels = ImageWidth(currImage) * ImageHeight(currImage)
      If changed > totalPixels * motionFraction
        Debug "Motion detected in " + frameFiles$()
      EndIf
      FreeImage(prevImage)   ; done comparing - avoid leaking one image per frame
    EndIf
    ; Keep the current frame as the reference for the next iteration.
    prevImage = currImage
  EndIf
Next

If prevImage
  FreeImage(prevImage)
EndIf
Code: Select all
; --- Example of using PB_Crypto library to encrypt and decrypt a string with ChaCha20 ---
; NOTE(review): PB_Crypto_ChaCha20_Encrypt/_Decrypt are assumed to come from
; the PB_Crypto user library; confirm their exact signatures against that
; library's documentation.

; OpenLibrary() loads a native shared library (.dll/.so), not a ".pb" source
; file, and takes (#Library, Filename$) - the original passed a third
; argument. With #PB_Any the returned handle must be kept for later calls.
Define cryptoLib = OpenLibrary(#PB_Any, "PB_Crypto.dll")
If cryptoLib = 0
  MessageRequester("Error", "Could not load PB_Crypto library!", 0)
  End
EndIf

; PrintN() only works once a console exists.
If OpenConsole() = 0
  End
EndIf

; Define the key and nonce (kept as the original ASCII strings; as raw bytes
; they are 32 bytes = 256 bits and 8 bytes = 64 bits respectively).
Global Key.s = "0123456789ABCDEF0123456789ABCDEF"  ; 256-bit key
Global Nonce.s = "01234567"                        ; 64-bit nonce

; Define the plaintext string to be encrypted
Global Plaintext.s = "This is a test string to be encrypted with ChaCha20."

; Encrypt, print, decrypt, print - a full round trip.
Global Ciphertext.s = PB_Crypto_ChaCha20_Encrypt(Plaintext, Key, Nonce)
PrintN("Ciphertext: " + Ciphertext)

Global DecryptedPlaintext.s = PB_Crypto_ChaCha20_Decrypt(Ciphertext, Key, Nonce)
PrintN("Decrypted plaintext: " + DecryptedPlaintext)

; Close the handle that was actually opened (the original called
; CloseLibrary(0), i.e. library #0, which was never opened).
CloseLibrary(cryptoLib)
CloseConsole()
Code: Select all
; Manually load an EXE (illustrative only)
; NOTE(review): This snippet is NOT functional PureBasic and cannot work as
; written; the notes below record why. The code itself is left untouched.
;  - OpenFile()/CloseFile() take a file number plus/or instead of a path, and
;    ReadFile() OPENS a file for reading rather than reading bytes.
;  - AllocateMemory(size) RETURNS the buffer address; it does not fill a
;    variable passed by name, so ExeData stays 0 here.
;  - A real PE loader must map each section at its virtual address, apply
;    relocations and resolve imports; jumping to "file buffer + entry RVA"
;    as done below can never execute correctly, and DEP/NX would block
;    executing a heap buffer anyway.
; SECURITY: manual PE loading is a technique associated with packers and
; loaders; treat this fragment as illustrative pseudo-code only.
; Define the path to the executable file
Define ExePath$ = "C:\Path\To\Executable.exe"
; Open the file and read it into a buffer
Protected ExeData.i, ExeSize.i
OpenFile(ExePath$, #PB_Open_Read | #PB_Open_Binary)   ; NOTE(review): wrong signature; #PB_Open_* flags are not PB constants
ExeSize = FileSize(ExePath$)
AllocateMemory(ExeData, ExeSize)                      ; NOTE(review): return value discarded; ExeData is never assigned
ReadFile(ExePath$, ExeData, ExeSize)                  ; NOTE(review): ReadFile() opens files; ReadData() reads bytes
CloseFile(ExePath$)                                   ; NOTE(review): expects the file number, not the path string
; Get the DOS header
Protected DosHeader.s = PeekS(ExeData, 0)             ; NOTE(review): PeekS(Address [, Length]) - this reads from ExeData itself
; Get the offset of the NT headers
Protected NtHeaderOffset.i = PeekL(ExeData, 0x3C)     ; NOTE(review): PB hex literals use $3C; PeekL takes a single address
; Get the NT header
Protected NtHeader.s = PeekS(ExeData, NtHeaderOffset)
; Get the entry point address
Protected EntryPoint.i = PeekL(ExeData, NtHeaderOffset + 0x28)  ; 0x28 = AddressOfEntryPoint offset within the NT headers
; Jump to the entry point
CallFunction(ExeData + EntryPoint)                    ; NOTE(review): CallFunction(#Library, Name$ [, args]) - not an address call
Code: Select all
;Simple PE section compression in PureBasic
; This program compresses the code section of a PE file and creates a new compressed PE file that can still be executed.
; NOTE(review): This fragment is not working PureBasic and is TRUNCATED - it
; ends in an empty "Do" with no body or Until. Recorded issues, code left
; untouched:
;  - Enumeration members must be constants ("#MAX_PATH = 260"); the plain
;    names below are invalid, yet are later used like constants.
;  - OpenFile() takes (#File, Filename$); #PB_File_Read / #PB_File_Write are
;    not PureBasic constants (use ReadFile()/CreateFile() instead).
;  - FileSize() takes a filename, not a handle; RequestFile() takes
;    (Title$, DefaultFile$, Pattern$, PatternPosition), not the flags used.
;  - ReadBytes()/WriteBytes() do not exist (ReadData()/WriteData() do), and
;    the first read loop discards every chunk by reusing one BUF_SIZE buffer,
;    so the later section scan reads mostly stale/garbage memory.
;  - "&H4065632E" is not PB syntax (hex literals use $...), "Exit" is not a
;    PB keyword (use Break), and CompressLZ77() matches no PB API.
;  - The "decompression routine" is emitted as a hex TEXT string, not machine
;    code bytes, and no PE header fields (SizeOfImage, section table, entry
;    point) are fixed up - the output could never execute.
; SECURITY: rewriting PE code sections is packer functionality; treat this
; fragment as illustrative pseudo-code only.
; Define constants
Enumeration
    MAX_PATH = 260
    BUF_SIZE = 8192
EndEnumeration
; Declare variables
Protected inPath.s, outPath.s, inFile.s, outFile.s
Protected inHandle.i, outHandle.i
Protected fileSize.i, bytesRead.i, bytesWritten.i
Protected buffer.i, compressedBuffer.i, compressedSize.i
Protected i.i, codeSize.i, compressedCodeSize.i, compressedCodeOffset.i
Protected entryPoint.i, decompressedCodeOffset.i
; Get the input file path
inPath = RequestFile("Select input file", "", "", #PB_FileRequest_ExistingFile)
If inPath = ""
    Debug "No input file selected"
    End
EndIf
; Open the input file for reading
inHandle = OpenFile(inPath, #PB_File_Read)
If inHandle = 0
    Debug "Could not open input file"
    End
EndIf
; Get the input file size
fileSize = FileSize(inHandle)   ; NOTE(review): FileSize() wants the path, not a handle
; Read the input file into a buffer
buffer = AllocateMemory(BUF_SIZE)
Do
    bytesRead = ReadBytes(inHandle, buffer, BUF_SIZE)   ; NOTE(review): each chunk overwrites the previous one
Until bytesRead = 0
; Find the code section in the input file
For i = 0 To fileSize - 4   ; NOTE(review): scans far past the BUF_SIZE buffer
    If PeekL(buffer + i) = &H4065632E ; "e.c@"
        codeSize = PeekL(buffer + i + 8)
        compressedBuffer = AllocateMemory(codeSize)
        compressedCodeOffset = i + 24
        compressedCodeSize = CompressLZ77(buffer + compressedCodeOffset, compressedBuffer, codeSize)
        Exit
    EndIf
Next
If codeSize = 0
    Debug "Could not find code section in input file"
    End
EndIf
; Set the entry point to the original entry point of the input file
entryPoint = PeekL(buffer + 40)   ; NOTE(review): 40 is not where AddressOfEntryPoint lives in a PE file
; Get the output file path
outPath = RequestFile("Select output file", "", "", #PB_FileRequest_Save)
If outPath = ""
    Debug "No output file selected"
    End
EndIf
; Open the output file for writing
outHandle = OpenFile(outPath, #PB_File_Write)
If outHandle = 0
    Debug "Could not open output file"
    End
EndIf
; Write the input file header to the output file
inFile = ExtractFilename(inPath)
bytesRead = BUF_SIZE
While bytesRead = BUF_SIZE   ; NOTE(review): inHandle was already read to EOF above; this copies nothing
    bytesRead = ReadBytes(inHandle, buffer, BUF_SIZE)
    bytesWritten = WriteBytes(outHandle, buffer, bytesRead)
Wend
; Write the decompression routine to the output file
decompressedCodeOffset = fileSize + compressedCodeSize
bytesWritten = WriteBytes(outHandle, "B9" + Hex(entryPoint, 8) + "E9" + Hex(decompressedCodeOffset - entryPoint - 5, 8))   ; NOTE(review): writes hex TEXT, not opcodes
If bytesWritten <> 5
    Debug "Error writing to output file"
    End
EndIf
; Write the compressed code section to the output file
bytesWritten = WriteBytes(outHandle, compressedBuffer, compressedCodeSize)
If bytesWritten <> compressedCodeSize
    Debug "Error writing to output file"
    End
EndIf
; Write the rest of the input file to the output file
Do
   
Code: Select all
; --- Minimal feed-forward neural network (2-4-1), trained on XOR ------------
; Rewritten as compilable PureBasic; fixes vs. the draft:
;   * Function/EndFunction does not exist in PB - Procedure.f with
;     ProcedureReturn is used instead.
;   * Arrays must be declared with Dim and indexed with parentheses; they
;     are Global so the procedures can reach them.
;   * Random(0, 1) is not a uniform float in [0,1]; weights are now drawn
;     as Random(1000) / 1000.0.
;   * The output-layer delta now includes the sigmoid derivative, which the
;     original back-propagation step omitted.
;   * Testing uses a forward pass only - the original called TrainNetwork()
;     while "testing", so it kept learning during evaluation.
; NOTE(review): there are no bias terms, so XOR may converge slowly or not
; at all from some random initialisations - confirm whether biases are wanted.

#InputSize       = 2
#HiddenLayerSize = 4
#OutputSize      = 1
#LearningRate    = 0.5

Global Dim input.f(#InputSize - 1)
Global Dim hiddenLayer.f(#HiddenLayerSize - 1)
Global Dim output.f(#OutputSize - 1)
Global Dim hiddenWeights.f(#InputSize - 1, #HiddenLayerSize - 1)
Global Dim outputWeights.f(#HiddenLayerSize - 1, #OutputSize - 1)

; Logistic activation.
Procedure.f Sigmoid(x.f)
  ProcedureReturn 1.0 / (1.0 + Exp(-x))
EndProcedure

; Draw every weight uniformly from [0, 1].
Procedure InitWeights()
  Protected i, j
  For i = 0 To #InputSize - 1
    For j = 0 To #HiddenLayerSize - 1
      hiddenWeights(i, j) = Random(1000) / 1000.0
    Next
  Next
  For i = 0 To #HiddenLayerSize - 1
    For j = 0 To #OutputSize - 1
      outputWeights(i, j) = Random(1000) / 1000.0
    Next
  Next
EndProcedure

; Propagate input() through the network into hiddenLayer() and output().
Procedure ForwardPass()
  Protected i, j
  For i = 0 To #HiddenLayerSize - 1
    hiddenLayer(i) = 0.0
    For j = 0 To #InputSize - 1
      hiddenLayer(i) + input(j) * hiddenWeights(j, i)
    Next
    hiddenLayer(i) = Sigmoid(hiddenLayer(i))
  Next
  For i = 0 To #OutputSize - 1
    output(i) = 0.0
    For j = 0 To #HiddenLayerSize - 1
      output(i) + hiddenLayer(j) * outputWeights(j, i)
    Next
    output(i) = Sigmoid(output(i))
  Next
EndProcedure

; One online-gradient training step toward targetOutput.
Procedure TrainNetwork(targetOutput.f)
  Protected i, j
  Protected outputDelta.f, hiddenDelta.f
  ForwardPass()
  ; delta = error * sigmoid'(activation); the derivative was missing before.
  outputDelta = (targetOutput - output(0)) * output(0) * (1.0 - output(0))
  For i = 0 To #HiddenLayerSize - 1
    hiddenDelta = outputDelta * outputWeights(i, 0) * hiddenLayer(i) * (1.0 - hiddenLayer(i))
    For j = 0 To #InputSize - 1
      hiddenWeights(j, i) + #LearningRate * hiddenDelta * input(j)
    Next
    outputWeights(i, 0) + #LearningRate * outputDelta * hiddenLayer(i)
  Next
EndProcedure

Define epoch
InitWeights()

; Train on the four XOR patterns.
For epoch = 1 To 10000
  input(0) = 0 : input(1) = 0 : TrainNetwork(0)
  input(0) = 0 : input(1) = 1 : TrainNetwork(1)
  input(0) = 1 : input(1) = 0 : TrainNetwork(1)
  input(0) = 1 : input(1) = 1 : TrainNetwork(0)
Next

; Evaluate with a forward pass only (no weight updates).
input(0) = 0 : input(1) = 0 : ForwardPass() : Debug output(0)
input(0) = 0 : input(1) = 1 : ForwardPass() : Debug output(0)
input(0) = 1 : input(1) = 0 : ForwardPass() : Debug output(0)
input(0) = 1 : input(1) = 1 : ForwardPass() : Debug output(0)
Code: Select all
; --- Minimal recurrent network (Elman-style: 2 inputs, 4 hidden, 1 output) --
; Rewritten as compilable PureBasic; fixes vs. the draft:
;   * The recurrent sum read hidden() while that same array was being
;     overwritten in the same loop; the previous time step now lives in
;     previousHidden(), which is also what the recurrent-weight gradient uses.
;   * previousHidden was used before it was declared and "copied" with the
;     invalid "previousHidden[] = hidden[]"; it is declared up front and
;     copied element by element.
;   * Function/EndFunction, bracket indexing and Random(0, 1) replaced by
;     valid PB constructs (see the feed-forward example above).
;   * The test section, which the draft left truncated, performs a forward
;     pass from a cleared state.
; NOTE(review): the weight update is a one-step approximation, not full
; back-propagation through time - confirm that is acceptable.

#NumInputs    = 2
#NumHidden    = 4
#NumOutputs   = 1
#LearningRate = 0.1

Global Dim input.f(#NumInputs - 1)
Global Dim hidden.f(#NumHidden - 1)
Global Dim previousHidden.f(#NumHidden - 1)
Global Dim output.f(#NumOutputs - 1)
Global Dim hiddenWeights.f(#NumHidden - 1, #NumHidden + #NumInputs - 1)
Global Dim outputWeights.f(#NumOutputs - 1, #NumHidden - 1)

; Logistic activation.
Procedure.f Sigmoid(x.f)
  ProcedureReturn 1.0 / (1.0 + Exp(-x))
EndProcedure

; Draw every weight uniformly from [0, 1].
Procedure InitWeights()
  Protected i, j
  For i = 0 To #NumHidden - 1
    For j = 0 To #NumHidden + #NumInputs - 1
      hiddenWeights(i, j) = Random(1000) / 1000.0
    Next
  Next
  For i = 0 To #NumOutputs - 1
    For j = 0 To #NumHidden - 1
      outputWeights(i, j) = Random(1000) / 1000.0
    Next
  Next
EndProcedure

; Forward pass from input() and previousHidden() into hidden() and output().
Procedure ForwardPass()
  Protected i, j
  For i = 0 To #NumHidden - 1
    hidden(i) = 0.0
    For j = 0 To #NumInputs - 1
      hidden(i) + input(j) * hiddenWeights(i, j)
    Next
    For j = 0 To #NumHidden - 1
      ; Previous time step - NOT the half-updated hidden() the draft read.
      hidden(i) + previousHidden(j) * hiddenWeights(i, j + #NumInputs)
    Next
    hidden(i) = Sigmoid(hidden(i))
  Next
  For i = 0 To #NumOutputs - 1
    output(i) = 0.0
    For j = 0 To #NumHidden - 1
      output(i) + hidden(j) * outputWeights(i, j)
    Next
    output(i) = Sigmoid(output(i))
  Next
EndProcedure

; One training step; updates weights, then shifts hidden() into previousHidden().
Procedure TrainNetwork(targetOutput.f)
  Protected i, j
  Protected outputError.f, hiddenError.f
  ForwardPass()
  outputError = targetOutput - output(0)
  For i = 0 To #NumOutputs - 1
    For j = 0 To #NumHidden - 1
      outputWeights(i, j) + #LearningRate * outputError * hidden(j)
    Next
  Next
  For i = 0 To #NumHidden - 1
    hiddenError = outputError * outputWeights(0, i) * hidden(i) * (1.0 - hidden(i))
    For j = 0 To #NumInputs - 1
      hiddenWeights(i, j) + #LearningRate * hiddenError * input(j)
    Next
    For j = 0 To #NumHidden - 1
      ; Gradient w.r.t. the recurrent weights comes from the PREVIOUS state.
      hiddenWeights(i, j + #NumInputs) + #LearningRate * hiddenError * previousHidden(j)
    Next
  Next
  ; Carry the state to the next time step.
  For i = 0 To #NumHidden - 1
    previousHidden(i) = hidden(i)
  Next
EndProcedure

Define epoch, i
InitWeights()

; Train the network on some example inputs and target outputs.
For epoch = 1 To 10000
  input(0) = 0 : input(1) = 0 : TrainNetwork(0)
  input(0) = 0 : input(1) = 1 : TrainNetwork(1)
  input(0) = 1 : input(1) = 0 : TrainNetwork(1)
  input(0) = 1 : input(1) = 1 : TrainNetwork(0)
Next

; Test from a cleared recurrent state (the draft stopped mid-test here).
For i = 0 To #NumHidden - 1
  previousHidden(i) = 0.0
Next
input(0) = 0 : input(1) = 0 : ForwardPass() : Debug output(0)
Code: Select all
; --- Minimal LSTM cell (2 inputs, 4 hidden units, 1 linear output) ----------
; (used e.g. for stock/time-series index predictions)
; Rewritten as compilable PureBasic; fixes vs. the draft:
;   * The output-gate loop was garbled/truncated mid-line; it is
;     reconstructed symmetrically to the input and forget gates.
;   * Hidden state now uses the conventional h = o * Tanh(c); the draft
;     used Sigmoid(c). The weight updates use the matching derivative.
;   * cellInput() already holds Tanh(net); the gradient previously applied
;     Tanh a SECOND time ((1 - Tanh(cellInput)^2)); now (1 - cellInput^2).
;   * Function/EndFunction, bracket indexing and Random(0, 1) replaced by
;     valid PB constructs; the redundant nested read-out loops collapsed.
; NOTE(review): the update rule is a one-step gradient approximation, not
; full back-propagation through time - confirm that is acceptable.

#NumInputs    = 2
#NumHidden    = 4
#NumOutputs   = 1
#LearningRate = 0.1

Global Dim input.f(#NumInputs - 1)
Global Dim hidden.f(#NumHidden - 1)
Global Dim output.f(#NumOutputs - 1)
Global Dim cellWeights.f(#NumHidden - 1, #NumHidden + #NumInputs - 1)
Global Dim hiddenWeights.f(#NumOutputs - 1, #NumHidden - 1)
Global Dim forgetGateWeights.f(#NumHidden - 1, #NumHidden + #NumInputs - 1)
Global Dim inputGateWeights.f(#NumHidden - 1, #NumHidden + #NumInputs - 1)
Global Dim outputGateWeights.f(#NumHidden - 1, #NumHidden + #NumInputs - 1)
Global Dim forgetGate.f(#NumHidden - 1)
Global Dim inputGate.f(#NumHidden - 1)
Global Dim outputGate.f(#NumHidden - 1)
Global Dim cellInput.f(#NumHidden - 1)
Global Dim cellState.f(#NumHidden - 1)
Global Dim previousCell.f(#NumHidden - 1)
Global Dim previousHidden.f(#NumHidden - 1)

; Logistic activation.
Procedure.f Sigmoid(x.f)
  ProcedureReturn 1.0 / (1.0 + Exp(-x))
EndProcedure

; Hyperbolic tangent activation.
Procedure.f Tanh(x.f)
  ProcedureReturn (Exp(x) - Exp(-x)) / (Exp(x) + Exp(-x))
EndProcedure

; Draw every weight uniformly from [0, 1].
Procedure InitWeights()
  Protected i, j
  For i = 0 To #NumHidden - 1
    For j = 0 To #NumHidden + #NumInputs - 1
      cellWeights(i, j)       = Random(1000) / 1000.0
      forgetGateWeights(i, j) = Random(1000) / 1000.0
      inputGateWeights(i, j)  = Random(1000) / 1000.0
      outputGateWeights(i, j) = Random(1000) / 1000.0
    Next
  Next
  For i = 0 To #NumOutputs - 1
    For j = 0 To #NumHidden - 1
      hiddenWeights(i, j) = Random(1000) / 1000.0
    Next
  Next
EndProcedure

; Pre-activation for unit `row` of one gate weight matrix:
; W[row, 0..NumInputs-1] . input  +  W[row, NumInputs..] . previousHidden
Procedure.f GatePreActivation(row, Array w.f(2))
  Protected j
  Protected sum.f = 0.0
  For j = 0 To #NumInputs - 1
    sum + input(j) * w(row, j)
  Next
  For j = 0 To #NumHidden - 1
    sum + previousHidden(j) * w(row, j + #NumInputs)
  Next
  ProcedureReturn sum
EndProcedure

; One LSTM training step toward targetOutput.
Procedure TrainNetwork(targetOutput.f)
  Protected i, j, k
  Protected error.f, tanhC.f

  ; Candidate and gate activations from input() and previousHidden().
  For i = 0 To #NumHidden - 1
    cellInput(i)  = Tanh(GatePreActivation(i, cellWeights()))
    forgetGate(i) = Sigmoid(GatePreActivation(i, forgetGateWeights()))
    inputGate(i)  = Sigmoid(GatePreActivation(i, inputGateWeights()))
    outputGate(i) = Sigmoid(GatePreActivation(i, outputGateWeights()))  ; reconstructed (garbled in the draft)
  Next

  ; New cell state: keep part of the old state, add the gated candidate.
  For i = 0 To #NumHidden - 1
    cellState(i) = forgetGate(i) * previousCell(i) + inputGate(i) * cellInput(i)
  Next

  ; Hidden state h = o * Tanh(c), then the linear read-out (the draft
  ; applied no activation to the output, which is kept).
  For i = 0 To #NumHidden - 1
    hidden(i) = outputGate(i) * Tanh(cellState(i))
  Next
  For i = 0 To #NumOutputs - 1
    output(i) = 0.0
    For k = 0 To #NumHidden - 1
      output(i) + hidden(k) * hiddenWeights(i, k)
    Next
  Next

  ; Calculate error
  error = targetOutput - output(0)

  ; One-step gradient updates (no BPTT); each derivative term matches the
  ; activation actually used above.
  For i = 0 To #NumHidden - 1
    tanhC = Tanh(cellState(i))
    For j = 0 To #NumInputs - 1
      cellWeights(i, j)       + #LearningRate * error * hiddenWeights(0, i) * outputGate(i) * (1.0 - cellInput(i) * cellInput(i)) * input(j)
      forgetGateWeights(i, j) + #LearningRate * error * hiddenWeights(0, i) * previousCell(i) * forgetGate(i) * (1.0 - forgetGate(i)) * input(j)
      inputGateWeights(i, j)  + #LearningRate * error * hiddenWeights(0, i) * cellInput(i) * inputGate(i) * (1.0 - inputGate(i)) * input(j)
      outputGateWeights(i, j) + #LearningRate * error * hiddenWeights(0, i) * tanhC * outputGate(i) * (1.0 - outputGate(i)) * input(j)
    Next
    For j = 0 To #NumHidden - 1
      cellWeights(i, j + #NumInputs)       + #LearningRate * error * hiddenWeights(0, i) * outputGate(i) * (1.0 - cellInput(i) * cellInput(i)) * previousHidden(j)
      forgetGateWeights(i, j + #NumInputs) + #LearningRate * error * hiddenWeights(0, i) * previousCell(i) * forgetGate(i) * (1.0 - forgetGate(i)) * previousHidden(j)
      inputGateWeights(i, j + #NumInputs)  + #LearningRate * error * hiddenWeights(0, i) * cellInput(i) * inputGate(i) * (1.0 - inputGate(i)) * previousHidden(j)
      outputGateWeights(i, j + #NumInputs) + #LearningRate * error * hiddenWeights(0, i) * tanhC * outputGate(i) * (1.0 - outputGate(i)) * previousHidden(j)
    Next
  Next

  For i = 0 To #NumOutputs - 1
    For j = 0 To #NumHidden - 1
      hiddenWeights(i, j) + #LearningRate * error * hidden(j)
    Next
  Next

  ; Carry cell and hidden state to the next time step.
  For i = 0 To #NumHidden - 1
    previousCell(i) = cellState(i)
    previousHidden(i) = hidden(i)
  Next
EndProcedure

; Weights must be initialised before the first TrainNetwork() call
; (the draft initialised them inline at the top level).
InitWeights()