chore: add scripts

This commit is contained in:
Denis Evers 2023-06-27 21:54:06 +08:00
parent 13f2d61e19
commit cebe5eaf1a
Signed by: denis-ev
GPG Key ID: 10BFC1EB323A6CA8
3 changed files with 160 additions and 0 deletions

View File

@ -0,0 +1,76 @@
#!/bin/bash
# create_template.sh — build a Proxmox VE template from a cloud qcow2 image.
#
# Usage: ./create_template.sh <qcow2_file>
#
# Prompts for a VMID, datastore and template name, imports the disk,
# attaches a cloud-init drive, and converts the VM into a template.
# Must run on a Proxmox VE node (requires the `qm` CLI).
set -euo pipefail

# Require the qcow2 image path as the only argument.
if [[ -z "${1:-}" ]]; then
  echo "Usage: ./create_template.sh <qcow2_file>" >&2
  exit 1
fi
qcow2_file=$1

# Fail early if the image does not exist instead of failing mid-import.
if [[ ! -f "$qcow2_file" ]]; then
  echo "Error: qcow2 file '$qcow2_file' not found." >&2
  exit 1
fi

read -r -p "Enter the VMID: " vmid

# If the VMID already exists, let the user pick a new one, wipe the old VM,
# or bail out entirely.
if qm config "$vmid" &> /dev/null; then
  read -r -p "VMID $vmid is already in use. Do you want to set a new VMID (n), delete the current VMID (d), or abort the script (a)? " response
  case $response in
    n)
      read -r -p "Enter the new VMID: " new_vmid
      # Verify the replacement VMID is actually free (original skipped this).
      if qm config "$new_vmid" &> /dev/null; then
        echo "VMID $new_vmid is also in use. Script aborted." >&2
        exit 1
      fi
      vmid="$new_vmid"
      ;;
    d)
      # `qm stop` fails if the VM is already stopped; destroy is what matters.
      qm stop "$vmid" || true
      qm destroy "$vmid"
      ;;
    a)
      echo "Script aborted."
      exit 1
      ;;
    *)
      echo "Invalid response. Script aborted." >&2
      exit 1
      ;;
  esac
fi

# Apply the local-lvm default the prompt advertises when input is empty.
read -r -p "Enter the datastore (local-lvm): " datastore
datastore=${datastore:-local-lvm}

read -r -p "Enter the template name: " template_name

# The guest agent must already be baked into the image; otherwise print the
# command to add it and stop.
read -r -p "Is the QEMU Guest Agent already installed in the qcow2? (yes/no): " agent_installed
if [[ "$agent_installed" != "yes" ]]; then
  echo "QEMU Guest Agent installation command: virt-customize --install qemu-guest-agent -a $qcow2_file"
  echo "Aborting script."
  exit 1
fi

echo "Creating VM $vmid..."
qm create "$vmid" --name "$template_name" --memory 2048 --net0 virtio,bridge=vmbr0

echo "Importing disk..."
qm importdisk "$vmid" "$qcow2_file" "$datastore"

echo "Setting SCSI controller..."
qm set "$vmid" --scsihw virtio-scsi-pci --scsi0 "$datastore:vm-$vmid-disk-0"

echo "Setting cloud-init..."
qm set "$vmid" --ide2 "$datastore:cloudinit"

echo "Setting boot disk..."
qm set "$vmid" --boot c --bootdisk scsi0

echo "Creating template..."
qm template "$vmid"

echo "Script execution completed successfully."

40
gitea_raw_links.sh Executable file
View File

@ -0,0 +1,40 @@
#!/bin/bash
#######################################
# Recursively print the raw download URL of every file in a Gitea
# repository contents listing.
# Arguments:
#   $1 - Gitea contents API URL
#   $2 - API token (may be empty)
# Outputs:
#   One raw download URL per line on stdout.
# Returns:
#   0 on success; non-zero if curl or jq fails.
#######################################
get_raw_links() {
  local url=$1
  local token=${2:-}
  local response entries entry entry_type download_url dir_url

  # Split declaration from assignment so a curl failure is not masked
  # by `local` always returning 0.
  response=$(curl -s -X 'GET' "$url" \
    -H 'accept: application/json' \
    -H "Authorization: token $token") || return 1

  entries=$(echo "$response" | jq -c '.[]') || return 1
  # An empty directory yields no entries; nothing to do.
  [[ -n "$entries" ]] || return 0

  while IFS= read -r entry; do
    entry_type=$(echo "$entry" | jq -r '.type')
    if [[ "$entry_type" == "file" ]]; then
      download_url=$(echo "$entry" | jq -r '.download_url')
      echo "$download_url"
    elif [[ "$entry_type" == "dir" ]]; then
      # Recurse into subdirectories using the API URL Gitea returns.
      dir_url=$(echo "$entry" | jq -r '.url')
      get_raw_links "$dir_url" "$token"
    fi
  done <<<"$entries"
}
# Entry point: validate arguments, derive the Gitea contents API URL from
# the repository URL, and print every raw file link in the repository.
if [ $# -lt 1 ]; then
  echo "Usage: ./gitea_raw_links.sh <repository_url> [token]" >&2
  exit 1
fi

repository_url=${1%/}   # tolerate a trailing slash
token=${2:-}

# Extract host, owner and repository name from the repository URL,
# e.g. https://git.example.com/owner/repo -> git.example.com, owner, repo.
# Deriving the host generalizes the original hard-coded git.evers.sh
# while producing the same URL for that instance.
username=$(basename "$(dirname "$repository_url")")
reponame=$(basename "$repository_url" .git)   # also accept clone-style .git URLs
host=${repository_url%/*/*}

api_url="${host}/api/v1/repos/${username}/${reponame}/contents"
if [ -n "$token" ]; then
  api_url="${api_url}?token=${token}"
fi

get_raw_links "$api_url" "$token"

44
woolies_scrape_recipes.py Normal file
View File

@ -0,0 +1,44 @@
"""Scrape Woolworths 'mains' recipe links and save them to a text file."""
import requests
from bs4 import BeautifulSoup
from urllib.parse import urljoin

# Base site URL; relative hrefs from recipe cards are resolved against it.
BASE_URL = "https://www.woolworths.com.au"
LISTING_URL = "https://www.woolworths.com.au/shop/recipes/collections/meal-type/mains"
OUTPUT_FILE = "scraped_recipes_links.txt"

# Browser-like User-Agent: the site rejects the default python-requests client.
HEADERS = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3"
}

try:
    # Bounded timeout so the script cannot hang forever on a stalled server.
    response = requests.get(LISTING_URL, headers=HEADERS, timeout=30)
    # Fail fast on HTTP errors instead of silently parsing an error page.
    response.raise_for_status()

    soup = BeautifulSoup(response.text, "html.parser")

    # Each recipe is rendered inside a card container div.
    recipe_cards = soup.find_all("div", class_="recipe-cardContainer")

    recipe_links = []
    for card in recipe_cards:
        anchor = card.find("a", class_="recipe-card")
        # Skip malformed cards rather than crashing on a missing anchor/href.
        if anchor is None or not anchor.get("href"):
            continue
        recipe_links.append(urljoin(BASE_URL, anchor["href"]))

    # Write one absolute recipe URL per line.
    with open(OUTPUT_FILE, "w") as file:
        for link in recipe_links:
            file.write(link + "\n")

    print("Recipe links have been scraped and saved to scraped_recipes_links.txt.")
except Exception as e:
    # Best-effort script: report the failure instead of raising a traceback.
    print("Failed to retrieve recipe links:", str(e))