# Original version taken from http://djangosnippets.org/snippets/186/
# Original author: udfalkso
# Modified version taken from http://djangosnippets.org/snippets/605/
# Modified by: Shwagroo Team
# Modified further by Doug Stryke

import cProfile
import os
import pstats
import re
import resource
import tempfile
import time
from collections import defaultdict
from io import StringIO
from statistics import stdev  # used for the "Standard deviation" summary row
from typing import Any, Dict  # noqa

from django.conf import settings
from django.db import connections

# FIXME change log base to something appropriate for the particular installation
PROFILE_LOG_BASE = "/var/tmp/django"

# FIXME change if needed
DATABASE_CONNECTION = "default"

time_columns = [
    ("Middleware time", "middleware_time"),
    ("Elapsed time", "total_time"),
    ("User cpu", "utime"),
    ("System cpu", "stime"),
    ("Query time", "query_time"),
    ("Query count", "query_count"),
    ("Max RSS(kb)", "max_rss"),
]
time_labels = [col[0] for col in time_columns]
time_fields = [col[1] for col in time_columns]
time_stats = {}  # type: Dict[float, Dict[str, Any]]

# Django middleware is class-based. The module is imported, the profiling class
# is instantiated, and the instance added to the middleware stack.
# We put this profiling module on both ends of the middleware stack to obtain
# the time spent in middleware. Consequently we have one imported module but
# two instances so most state is maintained at the module level.
_start_time = None
_middleware_start_time = None
_current_stats = None

words_re = re.compile(r"\s+")
group_prefix_re = [
    re.compile("^.*/django/[^/]+"),
    re.compile("^(.*)/[^/]+$"),  # extract module path
    re.compile(".*"),  # catch strange entries
]

_log_file_path = None
_prof = None
_profile_layer = False
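# A minimal sketch of how this middleware might be registered, assuming the
# module lives at "myproject.profiling" (a hypothetical dotted path) and a
# settings layout that uses the old-style middleware hooks implemented below
# (process_request / process_view / process_response). The class is listed
# twice, once at each end of the stack, so the outer instance can measure the
# time spent in the middleware in between:
#
#     MIDDLEWARE_CLASSES = [
#         "myproject.profiling.ProfileMiddleware",  # outer layer of the onion
#         # ... the rest of the middleware stack ...
#         "myproject.profiling.ProfileMiddleware",  # inner layer of the onion
#     ]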
class ProfileMiddleware:
    """Displays timing or profiling for any view.

    http://yoursite.com/yourview/?time or http://yoursite.com/yourview/?prof

    Record the time taken by Django and the project code to generate the web
    page and by the database to make the queries. Accumulate repeated web page
    visits and display the accumulated statistics in the web browser in place
    of the normal web page.

    The availability of this middleware is controlled by the DEBUG flag in
    settings.py.

    To time a page, add '?time' (or '&time'). To clear out accumulated
    timings, add '&reset'.

    Alternatively, profile the python function calls executed to create the
    web page and display the profiling statistics in the web browser.

    To profile a page, add '?prof' (or '&prof') to the url.

    By default the statistics are sorted by cumulative time. To specify an
    alternative sort, add one or more 'sort' parameters, e.g.,
    '&sort=time&sort=calls'.

    By default the display is limited to the first 40 function calls. To
    specify an alternative, add a limit parameter, e.g., '&limit=.50'.

    To write the profile data to a permanent file in `PROFILE_LOG_BASE`,
    add '&log'.

    To strip the directory paths from the profile statistics, add '&strip'.
    (Note that the file and group summaries are also removed by 'strip'.)
    """
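    # Example (host and path are the placeholders from the docstring above;
    # combine whichever of the documented options are needed for a given run):
    #   http://yoursite.com/yourview/?prof&strip&limit=.50&sort=time&sort=calls&log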
    def process_request(self, request):
        global _middleware_start_time, _start_time, _log_file_path, _prof
        if settings.DEBUG:
            if "time" in request.GET:
                # outer layer of middleware onion
                if _middleware_start_time is None:
                    _middleware_start_time = time.time()
                else:
                    # inner layer of onion
                    if "reset" in request.GET:
                        time_stats.clear()
                    else:
                        self._auto_reset_time(request)
                    _start_time = time.time()
                    self._start_rusage = resource.getrusage(resource.RUSAGE_SELF)
            elif "prof" in request.GET:
                if _prof is None:
                    _prof = cProfile.Profile()
                if "log" in request.GET and _log_file_path is None:
                    _log_file_path = get_log_file_path("middleware.profile", time.gmtime())

    def process_view(self, request, callback, callback_args, callback_kwargs):
        if settings.DEBUG and ("prof" in request.GET):
            return _prof.runcall(callback, request, *callback_args, **callback_kwargs)
        return None

    def process_response(self, request, response):
        # pylint: disable=unsupported-assignment-operation,unsubscriptable-object
        global _current_stats, _middleware_start_time, _log_file_path, _profile_layer, _prof
        if settings.DEBUG:
            if "time" in request.GET:
                if _current_stats is None:
                    # inner layer of onion
                    _current_stats = self._collect_time()
                else:
                    # outer layer of onion
                    _current_stats["middleware_time"] = (
                        time.time() - _middleware_start_time - _current_stats["total_time"]
                    )
                    self._display_time(request, response)
                    _middleware_start_time = None
                    _current_stats = None
            elif "prof" in request.GET:
                if _profile_layer:
                    self._profile(request, response)
                    _prof = None
                    _log_file_path = None
                    _profile_layer = False
                else:
                    _profile_layer = True
        return response

    # URI of the page most recently timed; class-level default so that the
    # first read of self._last_uri below does not raise AttributeError
    _last_uri = None

    # detect when we start timing a different page and auto-reset stats
    # (another option would be to maintain separate stats)
    def _auto_reset_time(self, request):
        uri = self._get_real_uri(request)
        if self._last_uri is None:
            self._last_uri = uri
        elif uri != self._last_uri:
            time_stats.clear()
            self._last_uri = uri

    _reUri = re.compile(r"^(http.+/)[?&]time.*$")

    def _get_real_uri(self, request):
        uri = request.build_absolute_uri()
        m = self._reUri.match(uri)
        if m is not None:
            return m.group(1)
        return None

    def _collect_time(self):
        # record stats from this request
        stats = time_stats.setdefault(_start_time, {})
        stats["total_time"] = time.time() - _start_time
        self._end_rusage = resource.getrusage(resource.RUSAGE_SELF)
        stats["max_rss"] = self._end_rusage.ru_maxrss
        stats["utime"] = self._elapsed_ru("ru_utime")
        stats["stime"] = self._elapsed_ru("ru_stime")
        # look up the configured connection by name, not the literal string
        queries = connections[DATABASE_CONNECTION].queries
        stats["query_time"] = sum(float(query["time"]) for query in queries)
        stats["query_count"] = len(queries)
        return stats

    def _display_time(self, request, response):
        # display all stats since last reset
        stats_str = "%-4s%-24s%13s%14s%10s%12s%12s%12s%12s\n" % tuple(
            ["Req", "Start time"] + time_labels
        )
        count = 0
        mins = defaultdict(int)
        sums = defaultdict(int)
        maxs = defaultdict(int)
        values = defaultdict(list)
        for start_time in sorted(time_stats.keys()):
            stats = time_stats[start_time]
            # accumulate times for summary, except for first request
            count += 1
            for field in time_fields:
                mins[field] = min(stats[field], mins.get(field, 99999))
                sums[field] += stats[field]
                maxs[field] = max(stats[field], maxs[field])
                values[field].append(stats[field])
            # display line
            stats_str += "%-4s%-24s%15.3f%14.3f%10.3f%12.3f%12.3f%12d%12d\n" % tuple(
                [count, time.ctime(start_time)]
                + [stats[field] for field in time_fields]
            )
        # display summary statistics
        stats_str += "\n%-4s%-24s%15.3f%14.3f%10.3f%12.3f%12.3f%12d%12d\n" % tuple(
            ["", "Minimum"] + [mins[field] for field in time_fields]
        )
        stats_str += "%-4s%-24s%15.3f%14.3f%10.3f%12.3f%12.3f%12d%12d\n" % tuple(
            ["", "Mean"] + [sums[field] / count for field in time_fields]
        )
        stats_str += "%-4s%-24s%15.3f%14.3f%10.3f%12.3f%12.3f%12d%12d\n" % tuple(
            ["", "Maximum"] + [maxs[field] for field in time_fields]
        )
        if count > 1:
            stats_str += "%-4s%-24s%15.3f%14.3f%10.3f%12.3f%12.3f%12d%12d\n" % tuple(
                ["", "Standard deviation"]
                + [stdev(values[field]) for field in time_fields]
            )
        if response and response.content and stats_str:
            response.content = (
                "<pre>%s\n\noptions: &reset (clear all stats)\n\n%s</pre>"
                % (request.build_absolute_uri(), stats_str)
            )

    def _elapsed_ru(self, name):
        return getattr(self._end_rusage, name) - getattr(self._start_rusage, name)

    def _profile(self, request, response):
        out = StringIO()
        if "log" in request.GET and _log_file_path is not None:
            _prof.dump_stats(_log_file_path)
            stats = pstats.Stats(_log_file_path, stream=out)
        else:
            with tempfile.NamedTemporaryFile(dir="/tmp") as temp_file:
                _prof.dump_stats(temp_file.name)
                stats = pstats.Stats(temp_file.name, stream=out)
        _prof.clear()
        if "strip" in request.GET:
            stats.strip_dirs()
        sorts = request.GET.getlist("sort")
        if sorts:
            stats.sort_stats(*sorts)
        else:
            stats.sort_stats("cumulative")
        limit = request.GET.get("limit")
        if limit is None:
            limit = 40
        else:
            # accept either an integer line count or a float fraction
            try:
                limit = int(limit)
            except ValueError:
                try:
                    limit = float(limit)
                except ValueError:
                    pass
        stats.print_stats(limit)
        stats_str = out.getvalue()
        if response and response.content and stats_str:
            response.content = (
                """<pre>%s\n\n options: &log (write data to file) &strip (remove directories) &limit=LIMIT (lines or fraction)\n &sort=KEY (e.g., cumulative (default), time, calls, pcalls, etc.)\n\n%s</pre>"""
                % (request.build_absolute_uri(), stats_str)
            )
            if "strip" not in request.GET:
                # response.content is read back as bytes, so encode the str summary
                response.content += self._summary_for_files(stats_str).encode()

    def _get_group(self, file_name):
        for g in group_prefix_re:
            name = g.findall(file_name)
            if name:
                return name[0]
        return None

    def _get_summary(self, results_dict, total):
        items = [(item[1], item[0]) for item in results_dict.items()]
        items.sort(reverse=True)
        items = items[:40]
        res = " tottime\n"
        for item in items:
            res += "%4.1f%%%7.3f%s\n" % (
                100 * item[0] / total if total else 0,
                item[0],
                item[1],
            )
        return res

    def _summary_for_files(self, stats_str):
        stats_str = stats_str.split("\n")[8:]
        mystats = {}
        mygroups = {}
        ttl = 0
        for s in stats_str:
            fields = words_re.split(s)
            # a pstats data row splits into 7 fields (leading whitespace yields an
            # empty first field): ncalls, tottime, percall, cumtime, percall,
            # filename:lineno(function)
            if len(fields) == 7:
                time_amt = float(fields[2])
                ttl += time_amt
                file_name = fields[6].split(":", 1)[0]
                if file_name not in mystats:
                    mystats[file_name] = 0
                mystats[file_name] += time_amt
                group = self._get_group(file_name)
                if group not in mygroups:
                    mygroups[group] = 0
                mygroups[group] += time_amt
        return (
            "<pre>"
            + " ---- By file ----\n\n"
            + self._get_summary(mystats, ttl)
            + "\n"
            + " ---- By group ---\n\n"
            + self._get_summary(mygroups, ttl)
            + "</pre>"
        )
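
# get_log_file_path() is called from process_request() above but is not defined
# in this listing. A minimal sketch, assuming it only needs to turn the given
# file name into a timestamped path under PROFILE_LOG_BASE (adjust as needed):
def get_log_file_path(log_file_name, timestamp):
    """Return a timestamped path for a profile log under PROFILE_LOG_BASE."""
    path = os.path.join(PROFILE_LOG_BASE, log_file_name)
    base, ext = os.path.splitext(path)
    return "%s-%s%s" % (base, time.strftime("%Y%m%dT%H%M%S", timestamp), ext)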