I'm encountering an issue while trying to deploy my Python-written Firebase function. The error message I'm receiving is as follows:
Error: Failed to parse build specification:
- FirebaseError It seems you are using a newer SDK than this version of the CLI can handle. Please update your CLI with
npm install -g firebase-tools
Here are the details:
Environment:
firebase-admin: 6.4.0 (latest version of the Firebase Admin Python SDK)
firebase-tools: 13.4.0 (latest version of the Firebase CLI)
Platform: macOS
Python: 3.12
Command Used:
firebase deploy --debug --only functions
[2024-02-29T15:30:14.892Z] > command requires scopes:
......
=== Deploying to 'miniproject1-412516'...
......
[2024-02-29T15:30:21.160Z] Customer code is not Node
[2024-02-29T15:30:21.162Z] Validating python source
[2024-02-29T15:30:21.162Z] Building python source
i functions: Loading and analyzing source code for codebase default to determine what to deploy
[2024-02-29T15:30:21.180Z] Found functions.yaml. Got spec: specVersion: 1
version: 1
functions:
collect_and_save_data_to_firebase_storage:
name: collect_and_save_data_to_firebase_storage
runtime: python312
trigger: http
source: main.py
Error: Failed to parse build specification:
- FirebaseError It seems you are using a newer SDK than this version of the CLI can handle. Please update your CLI with `npm install -g firebase-tools`
I've already tried updating the Firebase CLI with npm install -g firebase-tools, but the same error persists. How can I resolve this issue? Any insights would be greatly appreciated. Thank you!
Here is my main.py code:
import requests
import json
import pandas as pd
import os
import concurrent.futures
import time
import firebase_admin
from datetime import datetime,timedelta
from firebase_admin import credentials, storage, initialize_app
from google.cloud import storage as gcs
import sys
import io
def collect_and_save_data_to_firebase_storage():
    """Collect recently modified local-business records from the localdata.go.kr
    open API, filter them to closed/suspended businesses of one district office,
    and upload the result as a CSV file to Firebase Storage.

    Side effects: initializes the Firebase app (once per process), performs HTTP
    GETs against the public API, and writes one CSV blob to the default bucket.
    Returns None.
    """
    service_account_key = {
        "removed"
    }
    cred = credentials.Certificate(service_account_key)
    # Guard against ValueError("The default Firebase app already exists") when
    # the function is invoked more than once in the same process.
    if not firebase_admin._apps:
        initialize_app(cred)
    bucket = storage.bucket()

    def fetch_data_from_api(auth_key, base_url, lastModTsBgn, lastModTsEnd, result_type, page_index, page_size):
        """Fetch one page of API results as a parsed JSON dict, or None on failure."""
        try:
            url = f"{base_url}?authKey={auth_key}&pageIndex={page_index}&pageSize={page_size}&lastModTsBgn={lastModTsBgn}&lastModTsEnd={lastModTsEnd}&resultType={result_type}"
            response = requests.get(url)
            if response.status_code != 200:
                print(f"Failed to fetch data from API (Status code: {response.status_code})", file=sys.stderr)
                return None
            return json.loads(response.text)
        except Exception as e:
            print(f"An error occurred while fetching data from API: {str(e)}", file=sys.stderr)
            return None

    def save_data_to_storage(dataframe, storage_path):
        """Serialize `dataframe` to CSV (UTF-8 with BOM) and upload it to `storage_path`."""
        try:
            csv_buffer = io.StringIO()
            dataframe.to_csv(csv_buffer, index=False)
            # change to binary; utf-8-sig adds a BOM so Excel detects the encoding
            csv_bytes = csv_buffer.getvalue().encode('utf-8-sig')
            # upload at Firebase Storage
            blob = bucket.blob(storage_path)
            blob.upload_from_string(csv_bytes, content_type='text/csv')
            print(f"CSV file saved to Firebase Storage: {storage_path}")
        except Exception as e:
            print(f"An error occurred while saving data to Firebase Storage: {str(e)}", file=sys.stderr)

    def process_page(page_data):
        """Return the rows of one page that match the district code and a
        suspended ('휴업') or closed ('폐업') business state.

        Always returns a list (empty on error) so callers can extend() safely.
        """
        filtered_data = []
        try:
            body = page_data['result']['body']['rows'][0]['row']
            for data in body:
                # NOTE(review): opnSfTeamCode is compared as int 3740000 — if the
                # API returns it as a string this never matches; confirm.
                if data.get('opnSfTeamCode') == 3740000 and (data.get('trdStateNm') == '휴업' or data.get('trdStateNm') == '폐업'):
                    filtered_data.append(data)
        except Exception as e:
            print(f"An error occurred while processing the page: {str(e)}", file=sys.stderr)
        # Bug fix: the original returned None on exception, which made the
        # caller's filtered_dataset.extend(...) raise TypeError.
        return filtered_data

    def collect_data(auth_key, base_url, lastModTsBgn, lastModTsEnd, result_type, page_size, num_pages_to_fetch):
        """Fetch every page sequentially and return the combined filtered rows.

        Always returns a list (possibly partial if an error interrupts the loop).
        """
        filtered_dataset = []
        try:
            for page_index in range(1, num_pages_to_fetch + 1):
                response_data = fetch_data_from_api(auth_key, base_url, lastModTsBgn, lastModTsEnd, result_type, page_index, page_size)
                if response_data is not None:
                    filtered_dataset.extend(process_page(response_data))
        except Exception as e:
            print(f"An error occurred while collecting the data: {str(e)}", file=sys.stderr)
        # Bug fix: the original returned None on exception; the caller then
        # iterated over the result, which would raise TypeError.
        return filtered_dataset

    def get_total_pages(auth_key, base_url, lastModTsBgn, lastModTsEnd, result_type, page_size):
        """Return the number of result pages reported by the API (0 on failure)."""
        try:
            url = f"{base_url}?authKey={auth_key}&pageIndex=1&pageSize={page_size}&lastModTsBgn={lastModTsBgn}&lastModTsEnd={lastModTsEnd}&resultType={result_type}"
            response = requests.get(url)
            if response.status_code != 200:
                print(f"Failed to fetch data from API (Status code: {response.status_code})", file=sys.stderr)
                return 0
            data = json.loads(response.text)
            total_count = data['result']['header']['paging']['totalCount']
            # Ceiling division: one extra page for any remainder.
            total_pages = total_count // page_size
            if total_count % page_size > 0:
                total_pages += 1
            return total_pages
        except Exception as e:
            # Bug fix: typo "gettering" -> "getting"; and return 0 instead of the
            # implicit None, which crashed range(1, None + 1) in the caller.
            print(f"An error occurred while getting the total page number: {str(e)}", file=sys.stderr)
            return 0

    # period: records modified within the last 7 days
    latestday = datetime.today() - timedelta(days=0)
    maximum_days_ago = datetime.today() - timedelta(days=7)
    latestday_str = latestday.strftime('%Y%m%d')
    maximum_days_ago_str = maximum_days_ago.strftime('%Y%m%d')

    # collect data
    auth_key = "removed"
    base_url = "http://www.localdata.go.kr/platform/rest/TO0/openDataApi"
    lastModTsBgn = maximum_days_ago_str
    lastModTsEnd = latestday_str
    page_size = 500
    result_type = "json"
    storage_path = "newData/extracted_values.csv"
    num_pages_to_fetch = get_total_pages(auth_key, base_url, lastModTsBgn, lastModTsEnd, result_type, page_size)
    refiltered_dataset = collect_data(auth_key, base_url, lastModTsBgn, lastModTsEnd, result_type, page_size, num_pages_to_fetch)

    # to dataframe: pull only the columns we care about
    keys_to_extract = ['영업상태명', '폐업일자', '사업장명', '좌표정보(x)', '좌표정보(y)', '도로명전체주소']
    extracted_values = {key: [] for key in keys_to_extract}
    for data in refiltered_dataset:
        for key in keys_to_extract:
            extracted_values[key].append(data.get(key))
    extracted_df = pd.DataFrame(extracted_values)
    # (removed the redundant `extracted_df.columns = new_columns` assignment —
    # the columns already carry exactly these names)

    # delete NA: drop any row containing an empty-string cell
    for column in extracted_df.columns:
        extracted_df = extracted_df[extracted_df[column] != '']
    # Reset the index
    extracted_df.reset_index(drop=True, inplace=True)
    # Save CSV file at Firebase Storage
    save_data_to_storage(extracted_df, storage_path)
    print("CSV file saved to Firebase Storage:", storage_path)


# run
collect_and_save_data_to_firebase_storage()
firebase.json
{
"firestore": {
"rules": "firestore.rules",
"indexes": "firestore.indexes.json"
},
"functions": [
{
"source": "functions",
"codebase": "default",
"ignore": [
"venv",
".git",
"firebase-debug.log",
"firebase-debug.*.log"
]
}
]
}
I also created a functions.yaml file because deploying without it caused an error.
functions.yaml
specVersion: 1
version: 1
functions:
collect_and_save_data_to_firebase_storage:
name: collect_and_save_data_to_firebase_storage
runtime: python312
trigger: pubsub
source: main.py
requirements.txt
firebase-functions~=0.1.0
requests~=2.31.0
pandas~=2.2.1
firebase-admin~=6.4.0
google-cloud-storage~=2.14.0