import pandas as pd
from apiclient.discovery import build
from oauth2client.service_account import ServiceAccountCredentials
# NOTE(review): Spark resolves "/FileStore/..." against DBFS, but plain-Python
# libraries (oauth2client below) open paths on the LOCAL filesystem, where DBFS
# is FUSE-mounted under /dbfs — that mismatch is what raised FileNotFoundError.
df = spark.read.json("/FileStore/tables/cert.json")  # DBFS path: valid for Spark only

SCOPES = ['https://www.googleapis.com/auth/analytics.readonly']
# Use the /dbfs local mount so non-Spark code can open the key file directly.
KEY_FILE_LOCATION = "/dbfs/FileStore/tables/cert.json"
VIEW_ID = '####'  # replace with the numeric Google Analytics view ID
 
 
def initialize_analyticsreporting():
  """Authenticate via the service-account key file and return an
  Analytics Reporting API v4 service object."""
  creds = ServiceAccountCredentials.from_json_keyfile_name(
      KEY_FILE_LOCATION, SCOPES)
  # Discovery-built client for the v4 reporting endpoint.
  return build('analyticsreporting', 'v4', credentials=creds)
 
 
def get_report(analytics):
  """Fetch sessions-by-country for the last 7 days from the given
  Analytics Reporting service and return the raw batchGet response."""
  request_body = {
      'reportRequests': [{
          'viewId': VIEW_ID,
          'dateRanges': [{'startDate': '7daysAgo', 'endDate': 'today'}],
          'metrics': [{'expression': 'ga:sessions'}],
          'dimensions': [{'name': 'ga:country'}],
      }]
  }
  return analytics.reports().batchGet(body=request_body).execute()
 
 
def print_response(response):
  """Pretty-print every report in an Analytics Reporting API v4 response.

  For each row, prints its dimension values, then for each date range the
  metric name/value pairs.
  """
  for report in response.get('reports', []):
    header = report.get('columnHeader', {})
    dim_names = header.get('dimensions', [])
    metric_entries = header.get('metricHeader', {}).get('metricHeaderEntries', [])

    rows = report.get('data', {}).get('rows', [])
    for row in rows:
      # Dimension headers pair positionally with the row's dimension values.
      for name, dim_value in zip(dim_names, row.get('dimensions', [])):
        print(name + ': ', dim_value)

      # One metrics entry per requested date range.
      for idx, range_values in enumerate(row.get('metrics', [])):
        print('Date range:', str(idx))
        for entry, metric_value in zip(metric_entries, range_values.get('values')):
          print(entry.get('name') + ':', metric_value)
 
 
def main():
  """Fetch the GA report and expose it as a pandas DataFrame through the
  module-level global ``df_response`` (notebook-display convenience)."""
  analytics = initialize_analyticsreporting()
  response = get_report(analytics)

  global df_response
  # NOTE(review): iterating a dict yields its top-level KEYS, so this frame
  # holds the response's keys, not the report rows — confirm this is intended.
  df_response = pd.DataFrame(list(response))
 
# Run the pipeline when executed as a script; Databricks notebook cells run
# at module level, so this fires there as well.
if __name__ == '__main__':
  main()
# Bare expression: in a notebook the final expression's value is rendered;
# in a plain script this line has no effect.
df_response
I'm getting an error when I try to access the file location where I stored my credentials.
Error:
(1) Spark Jobs
FileNotFoundError: [Errno 2] No such file or directory: '/FileStore/tables/cert.json'
However, I can open the same file on its own with Spark without any issue:
df = spark.read.json("/FileStore/tables/cert.json")
display(df)