Rest API script to pull report from BigFix Web Report

Hello All,

Is it possible to pull Saved Report from BigFix Web Report using Rest API ?

Regards,
Manish Singh

Yes, you can. I am using Python code for all sorts of reporting, actions, etc.

If you’re referring to pulling a specific saved report, no, there is not. However, the data in a saved report should be accessible via a REST API call.

What sort of saved report data are you looking to extract via the REST API?

Hi Aram,

I have a report in BigFix Web Reports which contains the below fields, which I want to pull via the REST API.

  1. Relevant and Remediated Fixlet Report.
  2. Last Boot time.
  3. Last patch installed date and time.

and next report is Interactive Fixlet Compliance Reports v2.0 (Fixlet Compliance by Computer Group or Content)

Regards,
Manish Singh

Would it be possible for you to provide the logic which you have used?

Hi Aram,

Is it possible ?

Regards,
Manish Singh

Hi,

Could you please post your REST API code in Python here? I also want to pull BigFix web reports using a Python script via the REST API.

Here is the sample code. I put all the servers in a group and then run the report after the action is complete.
I run all my Python REST API actions in a similar fashion. When the action completes, it saves the cumulative output in a log, and the code below reads that log to see what the action has done. I also put the servers in a group whose name (and the action name) contains a "CHGXXX" string, so that I can find the action ID corresponding to the BigFix action against that computer group.

import xml.etree.cElementTree as ET
from xml.etree import ElementTree
import requests
import sys,traceback
import subprocess
import os.path
import time
import shutil

import urllib3
urllib3.disable_warnings()

# ---- configuration -------------------------------------------------------
# BigFix task/fixlet id whose action results drive the report.
sat6_linux_taskid="1642542"  #1642542  xyz  fixlet
# Session-relevance query endpoint, plus the client-query endpoints used to
# read a log file directly from each endpoint.
bigfix_URL="https://xyz.org:52311/api/query?relevance="
bigfix_URL_client_query="https://xyz:52311/api/clientquery"
bigfix_URL_client_query_res="https://xyz:52311/api/clientqueryresults"

# NOTE(review): credentials are hard-coded in plain text — consider reading
# them from the environment or a secrets store.
bigfix_username="user"
bigfix_pwd="pwd"

# Scratch files: saved action-result XML and the action-id -> group CSV.
action_linux_xml="/tmp/dyna_action_linux.xml"
action_linux_grp_csv="/tmp/dyna_grp_lin.csv"

action_id_linux=""

# Working report file; renamed to <final_csv>/<CHG>.txt at the end.
tmp_file="/tmp/temp_dyna.txt"
final_csv="/tmp"

# Seconds to wait for a client query to gather answers before fetching results.
query_sleep_time=90

list_splunk_pkg_vers={}  # appears unused in this file — TODO confirm it can go
list_splunk_log={}       # computer name -> contents of bftask_dyna_status.log
list_comp_grp=[]         # "group,site,actionid," rows mirrored from the CSV

# Session relevance: (computer, action ids, detailed status, action name) for
# results of actions sourced from the task, issued within the last 2 days.
query_relevance_linux="(name of computer of it, ids of action of it, detailed status of it,name of action of it) of results of bes actions whose (id of source fixlet of it = "+sat6_linux_taskid+"  AND now - time issued of it < 2 * day )"

action_url_linux=""+bigfix_URL+""+query_relevance_linux+""

def get_sat6_log(computer_grp,site):
   """Run a BigFix client query against *computer_grp* in *site* to read
   /var/tmp/bftask_dyna_status.log on each member, then cache each
   computer's log text in the module-level ``list_splunk_log`` dict,
   keyed by computer name.

   Blocks for ``query_sleep_time`` seconds between submitting the query
   and fetching results so the clients have time to answer.
   """
   xml_query='<BESAPI xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="BESAPI.xsd"> \
     <ClientQuery> \
    <ApplicabilityRelevance>true</ApplicabilityRelevance>\
    <QueryText> if (exists file "/var/tmp/bftask_dyna_status.log") then lines of file "/var/tmp/bftask_dyna_status.log" else "bftask_dyna_status.log  missing"</QueryText> \
    <Target> \
       <ComputerGroup> \
        <Name>'+computer_grp+'</Name> \
        <SiteName>'+site+'</SiteName> \
      </ComputerGroup> \
    </Target> \
   </ClientQuery> \
   </BESAPI>'

   print("\nXM Splk LOG:" +xml_query)
   # Submit the client query; the response carries the query ID we poll for.
   req_bes_splunkfwd_log=requests.post(bigfix_URL_client_query,verify=False,auth=(bigfix_username,bigfix_pwd),data=xml_query)
   tree2 = ET.fromstring(req_bes_splunkfwd_log.text)
   query_ID=""
   for fixlet in tree2.findall('.//ClientQuery'):
     query_ID=fixlet.find("ID").text
   bigfix_URL_query_res=bigfix_URL_client_query_res+"/"+query_ID
   print("Sleeping for "+str(query_sleep_time)+"sec to let the LOG Query to finish:" )
   print("Computer grp :"+computer_grp+" Site: "+site)
   print("Query LOG Result URL: "+bigfix_URL_query_res)
   time.sleep(query_sleep_time)
   if query_ID.isdigit():
     # Fetch the accumulated client answers for this query ID.
     req_bes_splunkfwd_log=requests.get(bigfix_URL_query_res,verify=False,auth=(bigfix_username,bigfix_pwd))

   # NOTE(review): when no numeric query ID came back, this re-parses the
   # original POST response (which contains no QueryResult elements), so the
   # loop below is a no-op — preserved from the original behavior.
   tree2 = ET.fromstring(req_bes_splunkfwd_log.text)
   for fixlet in tree2.findall('.//QueryResult'):
     computername=fixlet.find("ComputerName").text
     result=fixlet.find("Result").text
     list_splunk_log[computername]=result


def parse_xml(xml_file):
  """Parse the saved action-result XML and write one CSV row per Tuple.

  Each Tuple's Answer values are folded into a comma-separated string; an
  answer that looks like a server name (starts with "xyz") is paired with
  that server's cached log line from ``list_splunk_log`` (empty string when
  unknown).  Rows are written, with all spaces stripped, to the global
  ``write_to_csv`` file handle.
  """
  res = ""
  tree = ET.ElementTree(file=xml_file)
  for fixlet in tree.findall('.//Tuple'):
    for answer in fixlet.findall(".//Answer"):
      # Robustness: an empty <Answer/> has text None; skip instead of
      # crashing on None.lower().
      if answer.text is None:
        continue
      if answer.text.lower().startswith("xyz"):
        servername = answer.text.lower()
        # Server-name answers restart the row with the cached sat6 log.
        res = answer.text + "," + list_splunk_log.get(servername, "")
      else:
        # Other answers are prepended, so the final field order is
        # reversed relative to the Answer order.
        res = answer.text + "," + res
    write_to_csv.write(res.replace(" ", "") + "\n")
    res = ""

def get_sat6_grpname(CHG,action_id_linux,os):
  """Look up the computer group and site recorded for *action_id_linux* in
  the per-OS group CSV (written earlier by this script), then fetch the
  group's sat6 logs via get_sat6_log().

  CHG             -- change-ticket string (logged only).
  action_id_linux -- BigFix action id to look up.
  os              -- platform selector; only "linux" is supported.  NOTE:
                     this parameter shadows the stdlib ``os`` module inside
                     the function (the module is not used here).

  Raises ValueError for an unsupported *os* value (the previous version
  crashed with a NameError on an undefined CSV variable).
  """
  if os == "linux":
    CSV = action_linux_grp_csv
  else:
    raise ValueError("unsupported os: " + os)

  print("Computer grp in List" +str(list_comp_grp))
  # Pure-Python replacement for the old shell pipeline
  #   grep <id> <csv> | tail -1 | awk -F, '{print $2 / $3}'
  # Take the LAST line containing the action id and split it on commas.
  # This avoids shell=True (command injection via the id string) and the
  # bytes-vs-str mismatch of subprocess output on Python 3.
  site = ""
  computer_grp = ""
  with open(CSV) as grp_file:
    for line in grp_file:
      if action_id_linux in line:
        fields = line.strip().split(",")
        if len(fields) >= 3:
          computer_grp = fields[1]
          site = fields[2]

  print("Site  :"+site )
  print("Grup  :"+computer_grp)
  print("CSV   :"+CSV)
  print("os    :"+os)
  print("CHG    :"+CHG)
  get_sat6_log(computer_grp,site)


#print ("linux URL: " +action_url_linux)

# Pull the action results for the sat6 task and persist the raw XML so the
# later passes can re-parse it from disk.
req_action_linux = requests.get(action_url_linux, verify=False,
                                auth=(bigfix_username, bigfix_pwd))
with open(action_linux_xml, 'w+') as xml_out:
    xml_out.write(req_action_linux.text)


lst_action_id_lin=[]
remove_action_id_lin=[]

# Collect every numeric <Answer> (the action ids) from the saved action XML.
# (Dropped the unused `root` binding and the redundant emptiness guard —
# iterating an empty findall() result is already a no-op.)
tree = ET.ElementTree(file=action_linux_xml)
for fixlet in tree.findall('.//Tuple'):
    for answer in fixlet.findall(".//Answer"):
        if answer.text.isdigit():
            action_id_linux = answer.text
            lst_action_id_lin.append(action_id_linux)
#print ("ActionID Linux: "+str(lst_action_id_lin))

k1=""
lst_action_id_lin_uniq=set(lst_action_id_lin)
if len(lst_action_id_lin_uniq)> 0:
    print("The action ID for Linux is: "+str(lst_action_id_lin_uniq))

# For each action id, ask the server which BES computer group name occurs in
# the action's "selected groups" string.  Each match becomes one CSV line of
# the form "group,site,actionid," (fields are prepended, so the order is the
# reverse of the Answer order).  `with` guarantees the CSV is closed even if
# a request raises (the original leaked the handle on that path).
with open(action_linux_grp_csv,'w+') as write_to_lin_grp_csv:
    for action_id_linux in lst_action_id_lin_uniq:
        if action_id_linux.isdigit():
            query_computer_grp_linux='(item 1 of item 0 of it,item 0 of item 0 of it, item 0 of item 1 of it) of ((name of it, name of site of it ) of bes computer groups, (id of it, ( concatenation "" of substrings separated by "By Group" of it) of ( concatenation "" of substrings separated by "All Computers" of it ) of selected groups string of it as trimmed string) of bes actions whose (id of it = '+action_id_linux+' )) whose (item 1 of item 1 of it contains item 0 of item 0 of it )'

            grp_url_linux=""+bigfix_URL+""+query_computer_grp_linux+""
            grp_action_linux=requests.get(grp_url_linux,verify=False,auth=(bigfix_username,bigfix_pwd))
            print("\nLin: "+grp_action_linux.text)

            tree2 = ET.fromstring(grp_action_linux.text)
            result=tree2.findall('.//Tuple')
            if result:
                for fixlet in tree2.findall('.//Tuple'):
                    for j in fixlet.findall(".//Answer"):
                        k1=j.text+","+k1
                    write_to_lin_grp_csv.write(k1+"\n")
                    list_comp_grp.append(k1)
                    k1=""
            else:
                # No group matched this action — drop it from later passes.
                print("No Tuple found for Group(Linux): "+sat6_linux_taskid+" ActionID: "+action_id_linux)
                remove_action_id_lin.append(action_id_linux)
        else:
            print("No ActionID found for TaskID(Linux):  "+ sat6_linux_taskid)


# Drop the ids that produced no group rows, then report the surviving set.
if lst_action_id_lin_uniq:
    for stale_id in remove_action_id_lin:
        lst_action_id_lin_uniq.remove(stale_id)
    print("The Revised action ID for Linux is: "+str(lst_action_id_lin_uniq))


CHG=""
# Scan the saved XML for the "CHGxxx" change-ticket token embedded in the
# action/group names, pull each action's group logs, then fold everything
# into the working report file.  The `with` keeps the global `write_to_csv`
# handle (used by parse_xml) open for the whole pass and closes it after.
with open(tmp_file,'w+') as write_to_csv:
    for action_id_linux in lst_action_id_lin_uniq:
        if action_id_linux.isdigit():
            if os.path.isfile(action_linux_xml):
                # open() replaces the Python-2-only file() builtin, and the
                # context manager closes the handle (the original leaked it).
                with open(action_linux_xml) as datafile:
                    for line in datafile:
                        for x in line.split():
                            if x.startswith("CHG"):
                                CHG=x
                                break   # NOTE: exits only the inner token loop
                print(CHG+ " \nAction Id in loop : "+action_id_linux)
                get_sat6_grpname(CHG,action_id_linux,"linux")
    if len(lst_action_id_lin_uniq) > 0:
        parse_xml(action_linux_xml)

print("Change is: "+CHG)

report_path = final_csv + "/" + CHG + ".txt"
if os.stat(tmp_file).st_size != 0:
    # Publish the report under the change ticket's name.  0o644 replaces the
    # Python-2-only octal literal 0644 (a SyntaxError on Python 3).
    os.rename(tmp_file, report_path)
    os.chmod(report_path, 0o644)

# Bug fix: the original message omitted the "/" separator and printed e.g.
# "/tmpCHG123.txt" instead of the file's real location.
print ("The report should be in " + report_path)