diff --git a/AUTHORS b/AUTHORS new file mode 100644 index 00000000..fd736cb1 --- /dev/null +++ b/AUTHORS @@ -0,0 +1,7 @@ +# This is the official list of pprof authors for copyright purposes. +# This file is distinct from the CONTRIBUTORS files. +# See the latter for an explanation. +# Names should be added to this file as: +# Name or Organization +# The email address is not required for organizations. +Google Inc. \ No newline at end of file diff --git a/COMPILE.sh b/COMPILE.sh new file mode 100644 index 00000000..6179194b --- /dev/null +++ b/COMPILE.sh @@ -0,0 +1,30 @@ +#!/bin/bash + +# Copyright 2014 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +ROOT=${PWD} + +echo Building pprof at ${ROOT} +GOPATH=${ROOT}:${ROOT}/third_party go build pprof +RETCODE=$? + +if [ ${RETCODE} -eq 0 ] +then + echo Build was successful. pprof available at ${ROOT}/pprof +else + echo Build failed. + exit 1 +fi + diff --git a/CONTRIBUTING b/CONTRIBUTING new file mode 100644 index 00000000..2827b7d3 --- /dev/null +++ b/CONTRIBUTING @@ -0,0 +1,27 @@ +Want to contribute? Great! First, read this page (including the small print at the end). + +### Before you contribute +Before we can use your code, you must sign the +[Google Individual Contributor License Agreement] +(https://cla.developers.google.com/about/google-individual) +(CLA), which you can do online. The CLA is necessary mainly because you own the +copyright to your changes, even after your contribution becomes part of our +codebase, so we need your permission to use and distribute your code. We also +need to be sure of various other things—for instance that you'll tell us if you +know that your code infringes on other people's patents. You don't have to sign +the CLA until after you've submitted your code for review and a member has +approved it, but you must do it before we can put your code into our codebase. +Before you start working on a larger contribution, you should get in touch with +us first through the issue tracker with your idea so that we can help out and +possibly guide you. Coordinating up front makes it much easier to avoid +frustration later on. + +### Code reviews +All submissions, including submissions by project members, require review. We +use Github pull requests for this purpose. + +### The small print +Contributions made by corporations are covered by a different agreement than +the one above, the +[Software Grant and Corporate Contributor License Agreement] +(https://cla.developers.google.com/about/google-corporate). diff --git a/CONTRIBUTORS b/CONTRIBUTORS new file mode 100644 index 00000000..2f73c299 --- /dev/null +++ b/CONTRIBUTORS @@ -0,0 +1,14 @@ +# People who have agreed to one of the CLAs and can contribute patches. +# The AUTHORS file lists the copyright holders; this file +# lists people. For example, Google employees are listed here +# but not in AUTHORS, because Google holds the copyright. 
+# +# https://developers.google.com/open-source/cla/individual +# https://developers.google.com/open-source/cla/corporate +# +# Names should be added to this file as: +# Name +Raul Silvera +Tipp Moseley +Hyoun Kyu Cho + diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md new file mode 100644 index 00000000..79a62a6e --- /dev/null +++ b/README.md @@ -0,0 +1,75 @@ +# Introduction + +pprof is a tool for visualization and analysis of profiling data. + +pprof reads a collection of profiling samples in profile.proto format and +generates reports to visualize and help analyze the data. It can generate both +text and graphical reports (through the use of the dot visualization package). + +profile.proto is a protocol buffer that describes a set of callstacks +and symbolization information. A common usage is to represent a set of +sampled callstacks from statistical profiling. The format is +described on the src/proto/profile.proto file. For details on protocol +buffers, see https://developers.google.com/protocol-buffers + +Profiles can be read from a local file, or over http. Multiple +profiles of the same type can be aggregated or compared. + +If the profile samples contain machine addresses, pprof can symbolize +them through the use of the native binutils tools (addr2line and nm). + +**This is not an official Google product.** + +# Building pprof + +Prerequisites: + +- Go development kit: https://golang.org/dl/ + Known to work with Go 1.5 +- Graphviz: http://www.graphviz.org/ + Optional, used to generate graphic visualizations of profiles + +To build it, run the COMPILE.sh script. The TEST.sh script runs unit tests. + +# Basic usage + +pprof can read a profile from a file or directly from a server via http. +Specify the profile input(s) in the command line, and use options to +indicate how to format the report. + +## Generate a text report of the profile, sorted by hotness: + +``` +% pprof -top [main_binary] profile.pb.gz +Where + main_binary: Local path to the main program binary, to enable symbolization + profile.pb.gz: Local path to the profile in a compressed protobuf, or + URL to the http service that serves a profile. +``` + +## Generate a graph in an SVG file, and open it with a web browser: + +``` +pprof -web [main_binary] profile.pb.gz +``` + +## Run pprof on interactive mode: + +If no output formatting option is specified, pprof runs on interactive mode, +where reads the profile and accepts interactive commands for visualization and +refinement of the profile. + +``` +pprof [main_binary] profile.pb.gz + +This will open a simple shell that takes pprof commands to generate reports. +Type 'help' for available commands/options. +``` + +## Further documentation + +See doc/pprof.md for more detailed end-user documentation. + +See doc/developer/pprof.dev.md for developer documentation. + +See doc/developer/profile.proto.md for a description of the profile.proto format. 
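+
+## Reading profiles programmatically
+
+The `profile` package that ships in this repository can also be consumed
+directly from Go code. The sketch below is illustrative only: it assumes the
+package is importable as `profile` (as in the GOPATH layout used by
+COMPILE.sh) and that its `Parse` function accepts plain or gzip-compressed
+profile.proto data; consult the package source for the exact API.
+
+```
+package main
+
+import (
+	"fmt"
+	"log"
+	"os"
+
+	"profile" // assumed: the profile package under src/profile in this repository
+)
+
+func main() {
+	// Open a local profile in (possibly gzip-compressed) profile.proto format.
+	f, err := os.Open("profile.pb.gz")
+	if err != nil {
+		log.Fatal(err)
+	}
+	defer f.Close()
+
+	// Parse is assumed to detect and decompress gzip input transparently.
+	p, err := profile.Parse(f)
+	if err != nil {
+		log.Fatalf("parsing profile: %v", err)
+	}
+
+	// List the sample value types and the number of samples collected.
+	for _, st := range p.SampleType {
+		fmt.Printf("sample type: %s (%s)\n", st.Type, st.Unit)
+	}
+	fmt.Printf("samples: %d\n", len(p.Sample))
+}
+```
+
+Build it the same way as pprof itself, with the repository root and
+third_party directory on GOPATH, as COMPILE.sh does.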
diff --git a/TEST.sh b/TEST.sh new file mode 100644 index 00000000..dc2fe44f --- /dev/null +++ b/TEST.sh @@ -0,0 +1,30 @@ +#!/bin/bash + +# Copyright 2014 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +ROOT=${PWD} +INTERNAL="internal/binutils internal/driver internal/elfexec internal/graph internal/report internal/symbolizer internal/symbolz" +PACKAGES="profile ${INTERNAL}" + +echo Testing pprof packages at ${ROOT} +GOPATH=${ROOT}:${ROOT}/third_party go test $PACKAGES +RETCODE=$? + +if [ ${RETCODE} -eq 0 ] +then + echo Tests were successful. +else + echo Tests failed. +fi diff --git a/doc/developer/pprof.dev.md b/doc/developer/pprof.dev.md new file mode 100644 index 00000000..b2a197f6 --- /dev/null +++ b/doc/developer/pprof.dev.md @@ -0,0 +1,14 @@ +This is pprof's developer documentation. It discusses how to maintain and extend +pprof. It has yet to be written. + +# How is pprof code structured? + +Internal vs external packages. + +# External interface + +## Plugins + +## Legacy formats + +# Overview of internal packages diff --git a/doc/developer/profile.proto.md b/doc/developer/profile.proto.md new file mode 100644 index 00000000..9932e7e2 --- /dev/null +++ b/doc/developer/profile.proto.md @@ -0,0 +1,147 @@ +This is a description of the profile.proto format. + +# Overview + +Profile.proto is a data representation for profile data. It is independent of +the type of data being collected and the sampling process used to collect that +data. On disk, it is represented as a gzip-compressed protocol buffer, described +at src/proto/profile.proto + +A profile in this context refers to a collection of samples, each one +representing measurements performed at a certain point in the life of a job. A +sample associates a set of measurement values with a list of locations, commonly +representing the program call stack when the sample was taken. + +Tools such as pprof analyze these samples and display this information in +multiple forms, such as identifying hottest locations, building graphical call +graphs or trees, etc. + +# General structure of a profile + +A profile is represented on a Profile message, which contain the following +fields: + +* *sample*: A profile sample, with the values measured and the associated call + stack as a list of location ids. Samples with identical call stacks can be + merged by adding their respective values, element by element. +* *location*: A unique place in the program, commonly mapped to a single + instruction address. It has a unique nonzero id, to be referenced from the + samples. It contains source information in the form of lines, and a mapping id + that points to a binary. +* *function*: A program function as defined in the program source. It has a + unique nonzero id, referenced from the location lines. It contains a + human-readable name for the function (eg a C++ demangled name), a system name + (eg a C++ mangled name), the name of the corresponding source file, and other + function attributes. 
+* *mapping*: A binary that is part of the program during the profile + collection. It has a unique nonzero id, referenced from the locations. It + includes details on how the binary was mapped during program execution. By + convention the main program binary is the first mapping, followed by any + shared libraries. +* *string_table*: All strings in the profile are represented as indices into + this repeating field. The first string is empty, so index == 0 always + represents the empty string. + +# Measurement values + +Measurement values are represented as 64-bit integers. The profile contains an +explicit description of each value represented, using a ValueType message, with +two fields: + +* *Type*: A human-readable description of the type semantics. For example “cpu” + to represent CPU time, “wall” or “time” for wallclock time, or “memory” for + bytes allocated. +* *Unit*: A human-readable name of the unit represented by the 64-bit integer + values. For example, it could be “nanoseconds” or “milliseconds” for a time + value, or “bytes” or “megabytes” for a memory size. If this is just + representing a number of events, the recommended unit name is “count”. + +A profile can represent multiple measurements per sample, but all samples must +have the same number and type of measurements. The actual values are stored in +the Sample.value fields, each one described by the corresponding +Profile.sample_type field. + +Some profiles have a uniform period that describe the granularity of the data +collection. For example, a CPU profile may have a period of 100ms, or a memory +allocation profile may have a period of 512kb. Profiles can optionally describe +such a value on the Profile.period and Profile.period_type fields. The profile +period is meant for human consumption and does not affect the interpretation of +the profiling data. + +By convention, the first value on all profiles is the number of samples +collected at this call stack, with unit “count”. Because the profile does not +describe the sampling process beyond the optional period, it must include +unsampled values for all measurements. For example, a CPU profile could have +value[0] == samples, and value[1] == time in milliseconds. + +## Locations, functions and mappings + +Each sample lists the id of each location where the sample was collected, in +bottom-up order. Each location has an explicit unique nonzero integer id, +independent of its position in the profile, and holds additional information to +identify the corresponding source. + +The profile source is expected to perform any adjustment required to the +locations in order to point to the calls in the stack. For example, if the +profile source extracts the call stack by walking back over the program stack, +it must adjust the instruction addresses to point to the actual call +instruction, instead of the instruction that each call will return to. + +Sources usually generate profiles that fall into these two categories: + +* *Unsymbolized profiles*: These only contain instruction addresses, and are to + be symbolized by a separate tool. It is critical for each location to point to + a valid mapping, which will provide the information required for + symbolization. These are used for profiles of compiled languages, such as C++ + and Go. + +* *Symbolized profiles*: These contain all the symbol information available for + the profile. Mappings and instruction addresses are optional for symbolized + locations. 
These are used for profiles of interpreted or jitted languages, + such as Java or Python. Also, the profile format allows the generation of + mixed profiles, with symbolized and unsymbolized locations. + +The symbol information is represented in the repeating lines field of the +Location message. A location has multiple lines if it reflects multiple program +sources, for example if representing inlined call stacks. Lines reference +functions by their unique nonzero id, and the source line number within the +source file listed by the function. A function contains the source attributes +for a function, including its name, source file, etc. Functions include both a +user and a system form of the name, for example to include C++ demangled and +mangled names. For profiles where only a single name exists, both should be set +to the same string. + +Mappings are also referenced from locations by their unique nonzero id, and +include all information needed to symbolize addresses within the mapping. It +includes similar information to the Linux /proc/self/maps file. Locations +associated to a mapping should have addresses that land between the mapping +start and limit. Also, if available, mappings should include a build id to +uniquely identify the version of the binary being used. + +## Labels + +Samples optionally contain labels, which are annotations to discriminate samples +with identical locations. For example, a label can be used on a malloc profile +to indicate allocation size, so two samples on the same call stack with sizes +2MB and 4MB do not get merged into a single sample with two allocations and a +size of 6MB. + +Labels can be string-based or numeric. They are represented by the Label +message, with a key identifying the label and either a string or numeric +value. For numeric labels, by convention the key represents the measurement unit +of the numeric value. So for the previous example, the samples would have labels +{“bytes”, 2097152} and {“bytes”, 4194304}. + +## Keep and drop expressions + +Some profile sources may have knowledge of locations that are uninteresting or +irrelevant. However, if symbolization is needed in order to identify these +locations, the profile source may not be able to remove them when the profile is +generated. The profile format provides a mechanism to identify these frames by +name, through regular expressions. + +These expressions must match the function name in its entirety. Frames that +match Profile.drop\_frames will be dropped from the profile, along with any +frames below it. Frames that match Profile.keep\_frames will be kept, even if +they match drop\_frames. + diff --git a/doc/pprof.md b/doc/pprof.md new file mode 100644 index 00000000..e5340f05 --- /dev/null +++ b/doc/pprof.md @@ -0,0 +1,209 @@ +# pprof + +pprof is a tool for visualization and analysis of profiling data. + +pprof reads a collection of profiling samples in profile.proto format and +generates reports to visualize and help analyze the data. It can generate both +text and graphical reports (through the use of the dot visualization package). + +profile.proto is a protocol buffer that describes a set of callstacks +and symbolization information. A common usage is to represent a set of +sampled callstacks from statistical profiling. The format is +described on the src/proto/profile.proto file. For details on protocol +buffers, see https://developers.google.com/protocol-buffers + +Profiles can be read from a local file, or over http. 
Multiple +profiles of the same type can be aggregated or compared. + +If the profile samples contain machine addresses, pprof can symbolize +them through the use of the native binutils tools (addr2line and nm). + +# pprof profiles + +pprof operates on data in the profile.proto format. Each profile is a collection +of samples, where each sample is associated to a point in a location hierarchy, +one or more numeric values, and a set of labels. Often these profiles represents +data collected through statistical sampling of a program, so each sample +describes a program call stack and a number or weight of samples collected at a +location. pprof is agnostic to the profile semantics, so other uses are +possible. The interpretation of the reports generated by pprof depends on the +semantics defined by the source of the profile. + +# General usage + +The objective of pprof is to generate a report for a profile. The report is +generated from a location hierarchy, which is reconstructed from the profile +samples. Each location contains two values: *flat* is the value of the location +itself, while *cum* is the value of the location plus all its +descendants. Samples that include a location multiple times (eg for recursive +functions) are counted only once per location. + +The basic usage of pprof is + + pprof [options] source + +Where *format* selects the nature of the report, and *options* configure the +contents of the report. Each option has a value, which can be boolean, numeric, +or strings. While only one format can be specified, most options can be selected +independently of each other. + +Some common pprof options are: + +* **-flat [default]:** Sort entries based on their flat weight, on text reports. +* **-cum:** Sort entries based on cumulative weight, on text reports. +* **-functions [default]:** Accumulate samples at the function level; profile + locations that describe the same function will be merged into a report entry. +* **-lines:** Accumulate samples at the source line level; profile locations that + describe the same function will be merged into a report entry. +* **-addresses:** Accumulate samples at the instruction address; profile locations + that describe the same function address will be merged into a report entry. +* **-nodecount= _int_:** Maximum number of entries in the report. pprof will only print + this many entries and will use heuristics to select which entries to trim. +* **-focus= _regex_:** Only include samples that include a report entry matching + *regex*. +* **-ignore= _regex_:** Do not include samples that include a report entry matching + *regex*. +* **-show= _regex_:** Only show entries that match *regex*. +* **-hide= _regex_:** Do not show entries that match *regex*. + +Each sample in a profile may include multiple values, representing different +entities associated to the sample. pprof reports include a single sample value, +which by convention is the last one specified in the report. The `sample_index=` +option selects which value to use, and can be set to a number (from 0 to the +number of values - 1) or the name of the sample value. + +Sample values are numeric values associated to a unit. If pprof can recognize +these units, it will attempt to scale the values to a suitable unit for +visualization. The `unite=` option will force the use of a specific unit. For +example, `sample_index=sec` will force any time values to be reported in +seconds. pprof recognizes most common time and memory size units. 
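+
+For example, to report the sample value named `bytes` from a heap-style
+profile, sorted by cumulative weight (the binary and profile names here are
+only illustrative):
+
+    pprof -top -cum -sample_index=bytes main_binary heap_profile.pb.gz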
+ +## Text reports + +pprof text reports show the location hierarchy in text format. + +* **-text:** Prints the location entries, one per line, including the flat and cum + values. +* **-tree:** Prints each location entry with its predecessors and successors. +* **-peek= _regex_:** Print the location entry with all its predecessors and + successors, without trimming any entries. +* **-traces:** Prints each sample with a location per line. + +## Graphical reports + +pprof can generate graphical reports on the DOT format, and convert them to +multiple formats using the graphviz package. + +These reports represent the location hierarchy as a graph, with a report entry +represented as a node. Solid edges represent a direct connection between +entries, while dotted edges represent a connection where some intermediate nodes +have been removed. Nodes are removed using heuristics to limit the size of +the graph, controlled by the *nodecount* option. + +The size of each node represents the flat weight of the node, and the width of +each edge represents the cumulative weight of all samples going through +it. Nodes are colored according to their cumulative weight, highlighting the +paths with the highest cum weight. + +* **-dot:** Generates a report in .dot format. All other formats are generated from + this one. +* **-svg:** Generates a report in SVG format. +* **-web:** Generates a report in SVG format on a temp file, and starts a web + browser to view it. +* **-png, -jpg, -gif, -pdf:** Generates a report in these formats, + +## Annotated code + +pprof can also generate reports of annotated source with samples associated to +them. For these, the source or binaries must be locally available, and the +profile must contain data with the appropriate level of detail. + +pprof will look for source files on its current working directory and all its +ancestors. pprof will look for binaries on the directories specified in the +`$PPROF_BINARY_PATH` environment variable, by default `$HOME/pprof/binaries`. It +will look binaries up by name, and if the profile includes linker build ids, it +will also search for them in a directory named as the build id. + +pprof uses the binutils tools to examine and disassemble the binaries. By +default it will search for those tools in the current path, but it can also +search for them in a directory pointed to by the environment variable +`$PPROF_TOOLS`. + +* **-disasm= _regex_:** Generates an annotated source listing for functions matching + regex, with flat/cum weights for each source line. +* **-list= _regex_:** Generates an annotated disassembly listing for functions + matching *regex*. +* **-weblist= _regex_:** Generates a source/assembly combined annotated listing for + functions matching *regex*, and starts a web browser to display it. + +# Fetching profiles + +pprof can read profiles from a file or directly from a URL over http. Its native +format is a gzipped profile.proto file, but it can also accept some legacy +formats generated by [gperftools](https://github.com/gperftools/gperftools). + +When fetching from a URL handler, pprof accepts options to indicate how much to +wait for the profile. + +* **-seconds= _int_:** Makes pprof request for a profile with the specified duration + in seconds. Only makes sense for profiles based on elapsed time, such as CPU + profiles. +* **-timeout= _int_:** Makes pprof wait for the specified timeout when retrieving a + profile over http. If not specified, pprof will use heuristics to determine a + reasonable timeout. 
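+
+For example, the following asks a running job for a 30-second CPU profile and
+gives the handler extra time to respond (the timeout is assumed here to be in
+seconds, and the URL is only a placeholder for whatever profile handler the
+job actually exposes):
+
+    pprof -top -seconds=30 -timeout=90 http://myserver:8080/profilez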
+ +If multiple profiles are specified, pprof will fetch them all and merge +them. This is useful to combine profiles from multiple processes of a +distributed job. The profiles may be from different programs but must be +compatible (for example, CPU profiles cannot be combined with heap profiles). + +pprof can subtract a profile from another in order to compare them. For that, +use the **-base= _profile_** option, where *profile* is the filename or URL for the +profile to be subtracted. This may result on some report entries having negative +values. + +## Symbolization + +pprof can add symbol information to a profile that was collected only with +address information. This is useful for profiles for compiled languages, where +it may not be easy or even possible for the profile source to include function +names or source coordinates. + +pprof can extract the symbol information locally by examining the binaries using +the binutils tools, or it can ask running jobs that provide a symbolization +interface. + +pprof will attempt symbolizing profiles by default, and its `-symbolize` option +provides some control over symbolization: + +* **-symbolize=none:** Disables any symbolization from pprof. + +* **-symbolize=local:** Only attempts symbolizing the profile from local + binaries using the binutils tools. + +* **-symbolize=remote:** Only attempts to symbolize running jobs by contacting + their symbolization handler. + +For local symbolization, pprof will look for the binaries on the paths specified +by the profile, and then it will search for them on the path specified by the +environment variable `$PPROF_BINARY_PATH`. Also, the name of the main binary can +be passed directly to pprof as its first parameter, to override the name or +location of the main binary of the profile, like this: + + pprof /path/to/binary profile.pb.gz + +By default pprof will attempt to demangle and simplify C++ names, to provide +readable names for C++ symbols. It will aggressively discard template and +function parameters. This can be controlled with the `-symbolize=demangle` +option. Note that for remote symbolization mangled names may not be provided by +the symbolization handler. + +* **--symbolize=demangle=none:** Do not perform any demangling. Show mangled + names if available. + +* **-symbolize=demangle=full:** Demangle, but do not perform any + simplification. Show full demangled names if available. + +* **-symbolize=demangle=templates:** Demangle, and trim function parameters, but + not template parameters. + diff --git a/src/driver/driver.go b/src/driver/driver.go new file mode 100644 index 00000000..20e544a1 --- /dev/null +++ b/src/driver/driver.go @@ -0,0 +1,280 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package driver provides an external entry point to the pprof driver. 
+package driver + +import ( + internaldriver "internal/driver" + "internal/plugin" + "io" + "regexp" + "time" + + "profile" +) + +// PProf acquires a profile, and symbolizes it using a profile +// manager. Then it generates a report formatted according to the +// options selected through the flags package. +func PProf(o *Options) error { + return internaldriver.PProf(o.InternalOptions()) +} + +func (o *Options) InternalOptions() *plugin.Options { + var obj plugin.ObjTool + if o.Obj != nil { + obj = &internalObjTool{o.Obj} + } + var sym plugin.Symbolizer + if o.Sym != nil { + sym = &internalSymbolizer{o.Sym} + } + return &plugin.Options{ + o.Writer, + o.Flagset, + o.Fetch, + sym, + obj, + o.UI, + } +} + +// Options groups all the optional plugins into pprof. +type Options struct { + Writer Writer + Flagset FlagSet + Fetch Fetcher + Sym Symbolizer + Obj ObjTool + UI UI +} + +// Writer provides a mechanism to write data under a certain name, +// typically a filename. +type Writer interface { + Open(name string) (io.WriteCloser, error) +} + +// A FlagSet creates and parses command-line flags. +// It is similar to the standard flag.FlagSet. +type FlagSet interface { + // Bool, Int, Float64, and String define new flags, + // like the functions of the same name in package flag. + Bool(name string, def bool, usage string) *bool + Int(name string, def int, usage string) *int + Float64(name string, def float64, usage string) *float64 + String(name string, def string, usage string) *string + + // BoolVar, IntVar, Float64Var, and StringVar define new flags referencing + // a given pointer, like the functions of the same name in package flag. + BoolVar(pointer *bool, name string, def bool, usage string) + IntVar(pointer *int, name string, def int, usage string) + Float64Var(pointer *float64, name string, def float64, usage string) + StringVar(pointer *string, name string, def string, usage string) + + // StringList is similar to String but allows multiple values for a + // single flag + StringList(name string, def string, usage string) *[]*string + + // ExtraUsage returns any additional text that should be + // printed after the standard usage message. + // The typical use of ExtraUsage is to show any custom flags + // defined by the specific pprof plugins being used. + ExtraUsage() string + + // Parse initializes the flags with their values for this run + // and returns the non-flag command line arguments. + // If an unknown flag is encountered or there are no arguments, + // Parse should call usage and return nil. + Parse(usage func()) []string +} + +// A Fetcher reads and returns the profile named by src, using +// the specified duration and timeout. It returns the fetched +// profile and a string indicating a URL from where the profile +// was fetched, which may be different than src. +type Fetcher interface { + Fetch(src string, duration, timeout time.Duration) (*profile.Profile, string, error) +} + +// A Symbolizer introduces symbol information into a profile. +type Symbolizer interface { + Symbolize(mode string, srcs MappingSources, prof *profile.Profile) error +} + +// MappingSources map each profile.Mapping to the source of the profile. +// The key is either Mapping.File or Mapping.BuildId. +type MappingSources map[string][]struct { + Source string // URL of the source the mapping was collected from + Start uint64 // delta applied to addresses from this source (to represent Merge adjustments) +} + +// An ObjTool inspects shared libraries and executable files. 
+type ObjTool interface { + // Open opens the named object file. If the object is a shared + // library, start/limit/offset are the addresses where it is mapped + // into memory in the address space being inspected. + Open(file string, start, limit, offset uint64) (ObjFile, error) + + // Disasm disassembles the named object file, starting at + // the start address and stopping at (before) the end address. + Disasm(file string, start, end uint64) ([]Inst, error) +} + +// An Inst is a single instruction in an assembly listing. +type Inst struct { + Addr uint64 // virtual address of instruction + Text string // instruction text + File string // source file + Line int // source line +} + +// An ObjFile is a single object file: a shared library or executable. +type ObjFile interface { + // Name returns the underlying file name, if available. + Name() string + + // Base returns the base address to use when looking up symbols in the file. + Base() uint64 + + // BuildID returns the GNU build ID of the file, or an empty string. + BuildID() string + + // SourceLine reports the source line information for a given + // address in the file. Due to inlining, the source line information + // is in general a list of positions representing a call stack, + // with the leaf function first. + SourceLine(addr uint64) ([]Frame, error) + + // Symbols returns a list of symbols in the object file. + // If r is not nil, Symbols restricts the list to symbols + // with names matching the regular expression. + // If addr is not zero, Symbols restricts the list to symbols + // containing that address. + Symbols(r *regexp.Regexp, addr uint64) ([]*Sym, error) + + // Close closes the file, releasing associated resources. + Close() error +} + +// A Frame describes a single line in a source file. +type Frame struct { + Func string // name of function + File string // source file name + Line int // line in file +} + +// A Sym describes a single symbol in an object file. +type Sym struct { + Name []string // names of symbol (many if symbol was dedup'ed) + File string // object file containing symbol + Start uint64 // start virtual address + End uint64 // virtual address of last byte in sym (Start+size-1) +} + +// A UI manages user interactions. +type UI interface { + // Read returns a line of text (a command) read from the user. + // prompt is printed before reading the command. + ReadLine(prompt string) (string, error) + + // Print shows a message to the user. + // It formats the text as fmt.Print would and adds a final \n if not already present. + // For line-based UI, Print writes to standard error. + // (Standard output is reserved for report data.) + Print(...interface{}) + + // PrintErr shows an error message to the user. + // It formats the text as fmt.Print would and adds a final \n if not already present. + // For line-based UI, PrintErr writes to standard error. + PrintErr(...interface{}) + + // IsTerminal returns whether the UI is known to be tied to an + // interactive terminal (as opposed to being redirected to a file). + IsTerminal() bool + + // SetAutoComplete instructs the UI to call complete(cmd) to obtain + // the auto-completion of cmd, if the UI supports auto-completion at all. + SetAutoComplete(complete func(string) string) +} + +// internalObjTool is a wrapper to map from the pprof external +// interface to the internal interface. 
+type internalObjTool struct { + ObjTool +} + +func (o *internalObjTool) Open(file string, start, limit, offset uint64) (plugin.ObjFile, error) { + f, err := o.ObjTool.Open(file, start, limit, offset) + if err != nil { + return nil, err + } + return &internalObjFile{f}, err +} + +type internalObjFile struct { + ObjFile +} + +func (f *internalObjFile) SourceLine(frame uint64) ([]plugin.Frame, error) { + frames, err := f.ObjFile.SourceLine(frame) + if err != nil { + return nil, err + } + var pluginFrames []plugin.Frame + for _, f := range frames { + pluginFrames = append(pluginFrames, plugin.Frame(f)) + } + return pluginFrames, nil +} + +func (f *internalObjFile) Symbols(r *regexp.Regexp, addr uint64) ([]*plugin.Sym, error) { + syms, err := f.ObjFile.Symbols(r, addr) + if err != nil { + return nil, err + } + var pluginSyms []*plugin.Sym + for _, s := range syms { + ps := plugin.Sym(*s) + pluginSyms = append(pluginSyms, &ps) + } + return pluginSyms, nil +} + +func (o *internalObjTool) Disasm(file string, start, end uint64) ([]plugin.Inst, error) { + insts, err := o.ObjTool.Disasm(file, start, end) + if err != nil { + return nil, err + } + var pluginInst []plugin.Inst + for _, inst := range insts { + pluginInst = append(pluginInst, plugin.Inst(inst)) + } + return pluginInst, nil +} + +// internalSymbolizer is a wrapper to map from the pprof external +// interface to the internal interface. +type internalSymbolizer struct { + Symbolizer +} + +func (s *internalSymbolizer) Symbolize(mode string, srcs plugin.MappingSources, prof *profile.Profile) error { + isrcs := plugin.MappingSources{} + for m, s := range srcs { + isrcs[m] = s + } + return s.Symbolize(mode, isrcs, prof) +} diff --git a/src/internal/binutils/addr2liner.go b/src/internal/binutils/addr2liner.go new file mode 100644 index 00000000..bc79758e --- /dev/null +++ b/src/internal/binutils/addr2liner.go @@ -0,0 +1,179 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package binutils + +import ( + "bufio" + "fmt" + "io" + "os/exec" + "strconv" + "strings" + + "internal/plugin" +) + +const ( + defaultAddr2line = "addr2line" + + // addr2line may produce multiple lines of output. We + // use this sentinel to identify the end of the output. + sentinel = ^uint64(0) +) + +// Addr2Liner is a connection to an addr2line command for obtaining +// address and line number information from a binary. +type addr2Liner struct { + filename string + cmd *exec.Cmd + in io.WriteCloser + out *bufio.Reader + err error + + base uint64 +} + +// newAddr2liner starts the given addr2liner command reporting +// information about the given executable file. If file is a shared +// library, base should be the address at which is was mapped in the +// program under consideration. 
+func newAddr2Liner(cmd, file string, base uint64) (*addr2Liner, error) { + if cmd == "" { + cmd = defaultAddr2line + } + + a := &addr2Liner{ + filename: file, + base: base, + cmd: exec.Command(cmd, "-aif", "-e", file), + } + + var err error + if a.in, err = a.cmd.StdinPipe(); err != nil { + return nil, err + } + + outPipe, err := a.cmd.StdoutPipe() + if err != nil { + return nil, err + } + + a.out = bufio.NewReader(outPipe) + if err := a.cmd.Start(); err != nil { + return nil, err + } + return a, nil +} + +// close releases any resources used by the addr2liner object. +func (d *addr2Liner) close() { + d.in.Close() + d.cmd.Wait() +} + +func (d *addr2Liner) readString() (s string) { + if d.err != nil { + return "" + } + if s, d.err = d.out.ReadString('\n'); d.err != nil { + return "" + } + return strings.TrimSpace(s) +} + +// readFrame parses the addr2line output for a single address. It +// returns a populated plugin.Frame and whether it has reached the end of the +// data. +func (d *addr2Liner) readFrame() (plugin.Frame, bool) { + funcname := d.readString() + + if strings.HasPrefix(funcname, "0x") { + // If addr2line returns a hex address we can assume it is the + // sentinel. Read and ignore next two lines of output from + // addr2line + d.readString() + d.readString() + return plugin.Frame{}, true + } + + fileline := d.readString() + if d.err != nil { + return plugin.Frame{}, true + } + + linenumber := 0 + + if funcname == "??" { + funcname = "" + } + + if fileline == "??:0" { + fileline = "" + } else { + if i := strings.LastIndex(fileline, ":"); i >= 0 { + // Remove discriminator, if present + if disc := strings.Index(fileline, " (discriminator"); disc > 0 { + fileline = fileline[:disc] + } + // If we cannot parse a number after the last ":", keep it as + // part of the filename. + if line, err := strconv.Atoi(fileline[i+1:]); err == nil { + linenumber = line + fileline = fileline[:i] + } + } + } + + return plugin.Frame{funcname, fileline, linenumber}, false +} + +// addrInfo returns the stack frame information for a specific program +// address. It returns nil if the address could not be identified. +func (d *addr2Liner) addrInfo(addr uint64) ([]plugin.Frame, error) { + if d.err != nil { + return nil, d.err + } + + if _, d.err = fmt.Fprintf(d.in, "%x\n", addr-d.base); d.err != nil { + return nil, d.err + } + + if _, d.err = fmt.Fprintf(d.in, "%x\n", sentinel); d.err != nil { + return nil, d.err + } + + resp := d.readString() + if d.err != nil { + return nil, d.err + } + + if !strings.HasPrefix(resp, "0x") { + d.err = fmt.Errorf("unexpected addr2line output: %s", resp) + return nil, d.err + } + + var stack []plugin.Frame + for { + frame, end := d.readFrame() + if end { + break + } + + if frame != (plugin.Frame{}) { + stack = append(stack, frame) + } + } + return stack, d.err +} diff --git a/src/internal/binutils/addr2liner_nm.go b/src/internal/binutils/addr2liner_nm.go new file mode 100644 index 00000000..6d1d61e6 --- /dev/null +++ b/src/internal/binutils/addr2liner_nm.go @@ -0,0 +1,122 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package binutils + +import ( + "bufio" + "bytes" + "io" + "os/exec" + "strconv" + "strings" + + "internal/plugin" +) + +const ( + defaultNM = "nm" +) + +// addr2LinerNM is a connection to an nm command for obtaining address +// information from a binary. +type addr2LinerNM struct { + m []symbolInfo // Sorted list of addresses from binary. +} + +type symbolInfo struct { + address uint64 + name string +} + +// newAddr2LinerNM starts the given nm command reporting information about the +// given executable file. If file is a shared library, base should be +// the address at which is was mapped in the program under +// consideration. +func newAddr2LinerNM(cmd, file string, base uint64) (*addr2LinerNM, error) { + if cmd == "" { + cmd = defaultNM + } + + a := &addr2LinerNM{ + m: []symbolInfo{}, + } + + var b bytes.Buffer + c := exec.Command(cmd, "-n", file) + c.Stdout = &b + + if err := c.Run(); err != nil { + return nil, err + } + + // Parse addr2line output and populate symbol map. + // Skip lines we fail to parse. + buf := bufio.NewReader(&b) + for { + line, err := buf.ReadString('\n') + if line == "" && err != nil { + if err == io.EOF { + break + } + return nil, err + } + fields := strings.SplitN(line, " ", 3) + if len(fields) != 3 { + continue + } + address, err := strconv.ParseUint(fields[0], 16, 64) + if err != nil { + continue + } + a.m = append(a.m, symbolInfo{ + address: address + base, + name: fields[2], + }) + } + + return a, nil +} + +// addrInfo returns the stack frame information for a specific program +// address. It returns nil if the address could not be identified. +func (a *addr2LinerNM) addrInfo(addr uint64) ([]plugin.Frame, error) { + if len(a.m) == 0 || addr < a.m[0].address || addr > a.m[len(a.m)-1].address { + return nil, nil + } + + // Binary search. Search until low, high are separated by 1. + low, high := 0, len(a.m) + for low+1 < high { + mid := (low + high) / 2 + v := a.m[mid].address + if addr == v { + low = mid + break + } else if addr > v { + low = mid + } else { + high = mid + } + } + + // Address is between a.m[low] and a.m[high]. + // Pick low, as it represents [low, high). + f := []plugin.Frame{ + { + Func: a.m[low].name, + }, + } + return f, nil +} diff --git a/src/internal/binutils/binutils.go b/src/internal/binutils/binutils.go new file mode 100644 index 00000000..ddaf4b9d --- /dev/null +++ b/src/internal/binutils/binutils.go @@ -0,0 +1,242 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package binutils provides access to the GNU binutils. 
+package binutils + +import ( + "debug/elf" + "fmt" + "os" + "os/exec" + "path/filepath" + "regexp" + "strings" + + "internal/elfexec" + "internal/plugin" +) + +// A Binutils implements plugin.ObjTool by invoking the GNU binutils. +// SetConfig must be called before any of the other methods. +type Binutils struct { + // Commands to invoke. + addr2line string + nm string + objdump string + + // if fast, perform symbolization using nm (symbol names only), + // instead of file-line detail from the slower addr2line. + fast bool +} + +// SetFastSymbolization sets a toggle that makes binutils use fast +// symbolization (using nm), which is much faster than addr2line but +// provides only symbol name information (no file/line). +func (b *Binutils) SetFastSymbolization(fast bool) { + b.fast = fast +} + +// SetTools processes the contents of the tools option. It +// expects a set of entries separated by commas; each entry is a pair +// of the form t:path, where cmd will be used to look only for the +// tool named t. If t is not specified, the path is searched for all +// tools. +func (b *Binutils) SetTools(config string) { + // paths collect paths per tool; Key "" contains the default. + paths := make(map[string][]string) + for _, t := range strings.Split(config, ",") { + name, path := "", t + if ct := strings.SplitN(t, ":", 2); len(ct) == 2 { + name, path = ct[0], ct[1] + } + paths[name] = append(paths[name], path) + } + + defaultPath := paths[""] + b.addr2line = findExe("addr2line", append(paths["addr2line"], defaultPath...)) + b.nm = findExe("nm", append(paths["nm"], defaultPath...)) + b.objdump = findExe("objdump", append(paths["objdump"], defaultPath...)) +} + +// findExe looks for an executable command on a set of paths. +// If it cannot find it, returns cmd. +func findExe(cmd string, paths []string) string { + for _, p := range paths { + cp := filepath.Join(p, cmd) + if c, err := exec.LookPath(cp); err == nil { + return c + } + } + return cmd +} + +// Disasm returns the assembly instructions for the specified address range +// of a binary. +func (b *Binutils) Disasm(file string, start, end uint64) ([]plugin.Inst, error) { + if b.addr2line == "" { + // Update the command invocations if not initialized. + b.SetTools("") + } + return disassemble(b.objdump, file, start, end) +} + +// Open satisfies the plugin.ObjTool interface. +func (b *Binutils) Open(name string, start, limit, offset uint64) (plugin.ObjFile, error) { + if b.addr2line == "" { + // Update the command invocations if not initialized. + b.SetTools("") + } + + // Make sure file is a supported executable. + // The pprof driver uses Open to sniff the difference + // between an executable and a profile. + // For now, only ELF is supported. + // Could read the first few bytes of the file and + // use a table of prefixes if we need to support other + // systems at some point. + + f, err := os.Open(name) + if err != nil { + // For testing, do not require file name to exist. 
+ if strings.Contains(b.addr2line, "testdata/") { + return &fileAddr2Line{file: file{b: b, name: name}}, nil + } + + return nil, err + } + defer f.Close() + + ef, err := elf.NewFile(f) + if err != nil { + return nil, fmt.Errorf("Parsing %s: %v", name, err) + } + + var stextOffset *uint64 + var pageAligned = func(addr uint64) bool { return addr%4096 == 0 } + if strings.Contains(name, "vmlinux") || !pageAligned(start) || !pageAligned(limit) || !pageAligned(offset) { + // Reading all Symbols is expensive, and we only rarely need it so + // we don't want to do it every time. But if _stext happens to be + // page-aligned but isn't the same as Vaddr, we would symbolize + // wrong. So if the name the addresses aren't page aligned, or if + // the name is "vmlinux" we read _stext. We can be wrong if: (1) + // someone passes a kernel path that doesn't contain "vmlinux" AND + // (2) _stext is page-aligned AND (3) _stext is not at Vaddr + symbols, err := ef.Symbols() + if err != nil { + return nil, err + } + for _, s := range symbols { + if s.Name == "_stext" { + // The kernel may use _stext as the mapping start address. + stextOffset = &s.Value + break + } + } + } + + base, err := elfexec.GetBase(&ef.FileHeader, nil, stextOffset, start, limit, offset) + if err != nil { + return nil, fmt.Errorf("Could not identify base for %s: %v", name, err) + } + + // Find build ID, while we have the file open. + buildID := "" + if id, err := elfexec.GetBuildID(f); err == nil { + buildID = fmt.Sprintf("%x", id) + } + if b.fast { + return &fileNM{file: file{b, name, base, buildID}}, nil + } + return &fileAddr2Line{file: file{b, name, base, buildID}}, nil +} + +// file implements the binutils.ObjFile interface. +type file struct { + b *Binutils + name string + base uint64 + buildID string +} + +func (f *file) Name() string { + return f.name +} + +func (f *file) Base() uint64 { + return f.base +} + +func (f *file) BuildID() string { + return f.buildID +} + +func (f *file) SourceLine(addr uint64) ([]plugin.Frame, error) { + return []plugin.Frame{}, nil +} + +func (f *file) Close() error { + return nil +} + +func (f *file) Symbols(r *regexp.Regexp, addr uint64) ([]*plugin.Sym, error) { + return findSymbols(f.b.nm, f.name, r, addr) +} + +// fileNM implements the binutils.ObjFile interface, using 'nm' to map +// addresses to symbols (without file/line number information). It is +// faster than fileAddr2Line. +type fileNM struct { + file + addr2linernm *addr2LinerNM +} + +func (f *fileNM) SourceLine(addr uint64) ([]plugin.Frame, error) { + if f.addr2linernm == nil { + addr2liner, err := newAddr2LinerNM(f.b.nm, f.name, f.base) + if err != nil { + return nil, err + } + f.addr2linernm = addr2liner + } + return f.addr2linernm.addrInfo(addr) +} + +// fileAddr2Line implements the binutils.ObjFile interface, using +// 'addr2line' to map addresses to symbols (with file/line number +// information). It can be slow for large binaries with debug +// information. 
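Callers pick between the two implementations through SetFastSymbolization before calling Open. A hedged usage sketch from the caller's side (the path, addresses and mapping bounds below are hypothetical):

var b binutils.Binutils
b.SetFastSymbolization(true) // nm-backed fileNM: symbol names only
f, err := b.Open("/tmp/prog", 0x400000, 0x500000, 0)
if err == nil {
	frames, _ := f.SourceLine(0x401234) // resolved via addr2LinerNM
	_ = frames
	f.Close()
}

With fast symbolization off (the default), the same Open call returns a fileAddr2Line, described next, which also reports file and line information.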
+type fileAddr2Line struct { + file + addr2liner *addr2Liner +} + +func (f *fileAddr2Line) SourceLine(addr uint64) ([]plugin.Frame, error) { + if f.addr2liner == nil { + addr2liner, err := newAddr2Liner(f.b.addr2line, f.name, f.base) + if err != nil { + return nil, err + } + f.addr2liner = addr2liner + } + return f.addr2liner.addrInfo(addr) +} + +func (f *fileAddr2Line) Close() error { + if f.addr2liner != nil { + f.addr2liner.close() + f.addr2liner = nil + } + return nil +} diff --git a/src/internal/binutils/binutils_test.go b/src/internal/binutils/binutils_test.go new file mode 100644 index 00000000..b1502e72 --- /dev/null +++ b/src/internal/binutils/binutils_test.go @@ -0,0 +1,111 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package binutils + +import ( + "fmt" + "testing" + + "internal/plugin" +) + +var testAddrMap = map[int]string{ + 1000: "_Z3fooid.clone2", + 2000: "_ZNSaIiEC1Ev.clone18", + 3000: "_ZNSt6vectorIS_IS_IiSaIiEESaIS1_EESaIS3_EEixEm", +} + +func functionName(level int) (name string) { + if name = testAddrMap[level]; name != "" { + return name + } + return fmt.Sprintf("fun%d", level) +} + +func TestAddr2Liner(t *testing.T) { + const offset = 0x500 + + a, err := newAddr2Liner("testdata/wrapper/addr2line", "executable", offset) + if err != nil { + t.Fatalf("Addr2Liner Open: %v", err) + } + + for i := 1; i < 8; i++ { + addr := i*0x1000 + offset + s, err := a.addrInfo(uint64(addr)) + if err != nil { + t.Fatalf("addrInfo(%#x): %v", addr, err) + } + if len(s) != i { + t.Fatalf("addrInfo(%#x): got len==%d, want %d", addr, len(s), i) + } + for l, f := range s { + level := (len(s) - l) * 1000 + want := plugin.Frame{functionName(level), fmt.Sprintf("file%d", level), level} + + if f != want { + t.Errorf("AddrInfo(%#x)[%d]: = %+v, want %+v", addr, l, f, want) + } + } + } + s, err := a.addrInfo(0xFFFF) + if err != nil { + t.Fatalf("addrInfo(0xFFFF): %v", err) + } + if len(s) != 0 { + t.Fatalf("AddrInfo(0xFFFF): got len==%d, want 0", len(s)) + } + a.close() +} + +func TestAddr2LinerLookup(t *testing.T) { + oddSizedMap := addr2LinerNM{ + m: []symbolInfo{ + {0x1000, "0x1000"}, + {0x2000, "0x2000"}, + {0x3000, "0x3000"}, + }, + } + evenSizedMap := addr2LinerNM{ + m: []symbolInfo{ + {0x1000, "0x1000"}, + {0x2000, "0x2000"}, + {0x3000, "0x3000"}, + {0x4000, "0x4000"}, + }, + } + for _, a := range []*addr2LinerNM{ + &oddSizedMap, &evenSizedMap, + } { + for address, want := range map[uint64]string{ + 0x1000: "0x1000", + 0x1001: "0x1000", + 0x1FFF: "0x1000", + 0x2000: "0x2000", + 0x2001: "0x2000", + } { + if got, _ := a.addrInfo(address); !checkAddress(got, address, want) { + t.Errorf("%x: got %v, want %s", address, got, want) + } + } + } +} + +func checkAddress(got []plugin.Frame, address uint64, want string) bool { + if len(got) != 1 { + return false + } + return got[0].Func == want +} diff --git a/src/internal/binutils/disasm.go b/src/internal/binutils/disasm.go new file mode 100644 index 00000000..01a270d5 --- /dev/null +++ 
b/src/internal/binutils/disasm.go @@ -0,0 +1,156 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package binutils + +import ( + "bytes" + "fmt" + "io" + "os/exec" + "regexp" + "strconv" + + "internal/plugin" + "golang/demangle" +) + +var ( + nmOutputRE = regexp.MustCompile(`^\s*([[:xdigit:]]+)\s+(.)\s+(.*)`) + objdumpAsmOutputRE = regexp.MustCompile(`^\s*([[:xdigit:]]+):\s+(.*)`) + objdumpOutputFileLine = regexp.MustCompile(`^(.*):([0-9]+)`) +) + +func findSymbols(nm, file string, r *regexp.Regexp, address uint64) ([]*plugin.Sym, error) { + // Get from nm a list of symbols sorted by address. + cmd := exec.Command(nm, "-n", file) + out, err := cmd.Output() + if err != nil { + return nil, fmt.Errorf("%v: %v", cmd.Args, err) + } + + // Collect all symbols from the nm output, grouping names mapped to + // the same address into a single symbol. + var symbols []*plugin.Sym + names, start := []string{}, uint64(0) + buf := bytes.NewBuffer(out) + for symAddr, name, err := nextSymbol(buf); err == nil; symAddr, name, err = nextSymbol(buf) { + if err != nil { + return nil, err + } + if start == symAddr { + names = append(names, name) + continue + } + if match := matchSymbol(names, start, symAddr-1, r, address); match != nil { + symbols = append(symbols, &plugin.Sym{match, file, start, symAddr - 1}) + } + names, start = []string{name}, symAddr + } + + return symbols, nil +} + +// matchSymbol checks if a symbol is to be selected by checking its +// name to the regexp and optionally its address. It returns the name(s) +// to be used for the matched symbol, or nil if no match +func matchSymbol(names []string, start, end uint64, r *regexp.Regexp, address uint64) []string { + if address != 0 && address >= start && address <= end { + return names + } + for _, name := range names { + if r.MatchString(name) { + return []string{name} + } + + // Match all possible demangled versions of the name. + for _, o := range [][]demangle.Option{ + {demangle.NoClones}, + {demangle.NoParams}, + {demangle.NoParams, demangle.NoTemplateParams}, + } { + if demangled, err := demangle.ToString(name, o...); err == nil && r.MatchString(demangled) { + return []string{demangled} + } + } + } + return nil +} + +// disassemble returns the assembly instructions in a function from a +// binary file. It uses objdump to obtain the assembly listing. 
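One behavior of matchSymbol above is worth calling out: when an explicit address is supplied, a symbol whose range covers that address is selected regardless of the name regexp. An in-package sketch with made-up values:

names := matchSymbol([]string{"_Z3fooid", "foo_alias"}, 0x1000, 0x1fff,
	regexp.MustCompile("no_such_name"), 0x1234)
// names == []string{"_Z3fooid", "foo_alias"}: 0x1234 lies in [0x1000, 0x1fff],
// so every name mapped to that range is returned and the regexp is not consulted.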
+func disassemble(objdump string, file string, start, stop uint64) ([]plugin.Inst, error) { + cmd := exec.Command(objdump, "-d", "-C", "--no-show-raw-insn", "-l", + fmt.Sprintf("--start-address=%#x", start), + fmt.Sprintf("--stop-address=%#x", stop), + file) + out, err := cmd.Output() + if err != nil { + return nil, fmt.Errorf("%v: %v", cmd.Args, err) + } + + buf := bytes.NewBuffer(out) + file, line := "", 0 + var assembly []plugin.Inst + for { + input, err := buf.ReadString('\n') + if err != nil { + if err != io.EOF { + return nil, err + } + if input == "" { + break + } + } + + if fields := objdumpAsmOutputRE.FindStringSubmatch(input); len(fields) == 3 { + if address, err := strconv.ParseUint(fields[1], 16, 64); err == nil { + assembly = append(assembly, + plugin.Inst{ + Addr: address, + Text: fields[2], + File: file, + Line: line, + }) + continue + } + } + if fields := objdumpOutputFileLine.FindStringSubmatch(input); len(fields) == 3 { + if l, err := strconv.ParseUint(fields[2], 10, 32); err == nil { + file, line = fields[1], int(l) + } + } + } + + return assembly, nil +} + +// nextSymbol parses the nm output to find the next symbol listed. +// Skips over any output it cannot recognize. +func nextSymbol(buf *bytes.Buffer) (uint64, string, error) { + for { + line, err := buf.ReadString('\n') + if err != nil { + if err != io.EOF || line == "" { + return 0, "", err + } + } + + if fields := nmOutputRE.FindStringSubmatch(line); len(fields) == 4 { + if address, err := strconv.ParseUint(fields[1], 16, 64); err == nil { + return address, fields[3], nil + } + } + } +} diff --git a/src/internal/binutils/disasm_test.go b/src/internal/binutils/disasm_test.go new file mode 100644 index 00000000..0d15ad53 --- /dev/null +++ b/src/internal/binutils/disasm_test.go @@ -0,0 +1,133 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package binutils + +import ( + "fmt" + "regexp" + "testing" + + "internal/plugin" +) + +// TestFindSymbols tests the FindSymbols routine by using a fake nm +// script. 
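For orientation, these are the shapes of tool output that the three regexps in disasm.go are written to pick apart (the address, symbol and path below are made up):

m := nmOutputRE.FindStringSubmatch("0000000000401000 T main")
// m[1] == "0000000000401000" (address), m[2] == "T" (symbol type), m[3] == "main"
a := objdumpAsmOutputRE.FindStringSubmatch("  401000:\tpush   %rbp")
// a[1] == "401000", a[2] == "push   %rbp"
f := objdumpOutputFileLine.FindStringSubmatch("/src/foo.c:12")
// f[1] == "/src/foo.c", f[2] == "12"

The stub nm and objdump scripts under testdata/ exercise these parsers in the tests that follow.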
+func TestFindSymbols(t *testing.T) { + type testcase struct { + query string + want []plugin.Sym + } + + testcases := []testcase{ + { + "line.*[AC]", + []plugin.Sym{ + {[]string{"lineA001"}, "object.o", 0x1000, 0x1FFF}, + {[]string{"line200A"}, "object.o", 0x2000, 0x2FFF}, + {[]string{"lineB00C"}, "object.o", 0x3000, 0x3FFF}, + }, + }, + { + "Dumb::operator", + []plugin.Sym{ + {[]string{"Dumb::operator()(char const*) const"}, "object.o", 0x3000, 0x3FFF}, + }, + }, + } + + const nm = "testdata/wrapper/nm" + for _, tc := range testcases { + syms, err := findSymbols(nm, "object.o", regexp.MustCompile(tc.query), 0) + if err != nil { + t.Fatalf("%q: findSymbols: %v", tc.query, err) + } + if err := checkSymbol(syms, tc.want); err != nil { + t.Errorf("%q: %v", tc.query, err) + } + } +} + +func checkSymbol(got []*plugin.Sym, want []plugin.Sym) error { + if len(got) != len(want) { + return fmt.Errorf("unexpected number of symbols %d (want %d)\n", len(got), len(want)) + } + + for i, g := range got { + w := want[i] + if len(g.Name) != len(w.Name) { + return fmt.Errorf("names, got %d, want %d", len(g.Name), len(w.Name)) + } + for n := range g.Name { + if g.Name[n] != w.Name[n] { + return fmt.Errorf("name %d, got %q, want %q", n, g.Name[n], w.Name[n]) + } + } + if g.File != w.File { + return fmt.Errorf("filename, got %q, want %q", g.File, w.File) + } + if g.Start != w.Start { + return fmt.Errorf("start address, got %#x, want %#x", g.Start, w.Start) + } + if g.End != w.End { + return fmt.Errorf("end address, got %#x, want %#x", g.End, w.End) + } + } + return nil +} + +// TestFunctionAssembly tests the FunctionAssembly routine by using a +// fake objdump script. +func TestFunctionAssembly(t *testing.T) { + type testcase struct { + s plugin.Sym + want []plugin.Inst + } + testcases := []testcase{ + { + plugin.Sym{[]string{"symbol1"}, "", 0x1000, 0x1FFF}, + []plugin.Inst{ + {0x1000, "instruction one", "", 0}, + {0x1001, "instruction two", "", 0}, + {0x1002, "instruction three", "", 0}, + {0x1003, "instruction four", "", 0}, + }, + }, + { + plugin.Sym{[]string{"symbol2"}, "", 0x2000, 0x2FFF}, + []plugin.Inst{ + {0x2000, "instruction one", "", 0}, + {0x2001, "instruction two", "", 0}, + }, + }, + } + + const objdump = "testdata/wrapper/objdump" + + for _, tc := range testcases { + insns, err := disassemble(objdump, "object.o", tc.s.Start, tc.s.End) + if err != nil { + t.Fatalf("FunctionAssembly: %v", err) + } + + if len(insns) != len(tc.want) { + t.Errorf("Unexpected number of assembly instructions %d (want %d)\n", len(insns), len(tc.want)) + } + for i := range insns { + if insns[i] != tc.want[i] { + t.Errorf("Expected symbol %v, got %v\n", tc.want[i], insns[i]) + } + } + } +} diff --git a/src/internal/binutils/testdata/wrapper/addr2line b/src/internal/binutils/testdata/wrapper/addr2line new file mode 100755 index 00000000..ecbea0b5 --- /dev/null +++ b/src/internal/binutils/testdata/wrapper/addr2line @@ -0,0 +1,85 @@ +#!/bin/bash +# Copyright 2014 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# addr2line stub for testing of addr2liner. +# Will recognize (and ignore) the -aiCfej options. +# +# Accepts addresses 1000 to 9000 and output multiple frames of the form: +# 0x9000/fun9000/file9000:9000 +# 0x8000/fun8000/file8000:8000 +# 0x7000/fun7000/file7000:7000 +# ... +# 0x1000/fun1000/file1000:1000 +# +# Returns ??/??/??:0 for all other inputs. + +while getopts aiCfe:j: opt; do + case "$opt" in + a|i|C|f|e|j) ;; + *) + echo "unrecognized option: $1" >&2 + exit 1 + esac +done + +while read input +do + address="$input" + + # remove 0x from input. + case "${address}" in + 0x*) + address=$(printf '%x' "$address") + ;; + *) + address=$(printf '%x' "0x$address") + esac + + printf '0x%x\n' "0x$address" + loop=1 + while [ $loop -eq 1 ] + do + # prepare default output. + output2="fun${address}" + output3="file${address}:${address}" + + # specialize output for selected cases. + case "${address}" in + 1000) + output2="_Z3fooid.clone2" + loop=0 + ;; + 2000) + output2="_ZNSaIiEC1Ev.clone18" + address=1000 + ;; + 3000) + output2="_ZNSt6vectorIS_IS_IiSaIiEESaIS1_EESaIS3_EEixEm" + address=2000 + ;; + [4-9]000) + address=$(expr ${address} - 1000) + ;; + *) + output2='??' + output3='??:0' + loop=0 + esac + + echo "$output2" + echo "$output3" + done +done +exit 0 diff --git a/src/internal/binutils/testdata/wrapper/nm b/src/internal/binutils/testdata/wrapper/nm new file mode 100755 index 00000000..c2bcbea7 --- /dev/null +++ b/src/internal/binutils/testdata/wrapper/nm @@ -0,0 +1,62 @@ +#!/bin/bash +# Copyright 2014 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# nm stub for testing of listing. +# Will recognize (and ignore) the -nC options. +# +# Outputs fixed nm output. 
+ +while getopts nC opt; do + case "$opt" in + n) ;; + C) demangle=1;; + *) + echo "unrecognized option: $1" >&2 + exit 1 + esac +done + +if [ $demangle ] +then + cat < 1 { + arg0 := args[0] + if file, err := o.Obj.Open(arg0, 0, ^uint64(0), 0); err == nil { + file.Close() + execName = arg0 + args = args[1:] + } else if *flagBuildID == "" && isBuildID(arg0) { + *flagBuildID = arg0 + args = args[1:] + } + } + + // Report conflicting options + if err := updateFlags(installedFlags); err != nil { + return nil, nil, err + } + + cmd, err := outputFormat(flagCommands, flagParamCommands) + if err != nil { + return nil, nil, err + } + + si := pprofVariables["sample_index"].value + si = sampleIndex(flagTotalDelay, si, "delay", "-total_delay", o.UI) + si = sampleIndex(flagMeanDelay, si, "delay", "-mean_delay", o.UI) + si = sampleIndex(flagContentions, si, "contentions", "-contentions", o.UI) + si = sampleIndex(flagInUseSpace, si, "inuse_space", "-inuse_space", o.UI) + si = sampleIndex(flagInUseObjects, si, "inuse_objects", "-inuse_objects", o.UI) + si = sampleIndex(flagAllocSpace, si, "alloc_space", "-alloc_space", o.UI) + si = sampleIndex(flagAllocObjects, si, "alloc_objects", "-alloc_objects", o.UI) + pprofVariables.set("sample_index", si) + + if *flagMeanDelay { + pprofVariables.set("mean", "true") + } + + source := &source{ + Sources: args, + ExecName: execName, + BuildID: *flagBuildID, + Seconds: *flagSeconds, + Timeout: *flagTimeout, + Symbolize: *flagSymbolize, + } + + for _, s := range *flagBase { + if *s != "" { + source.Base = append(source.Base, *s) + } + } + + if bu, ok := o.Obj.(*binutils.Binutils); ok { + bu.SetTools(*flagTools) + } + return source, cmd, nil +} + +// installFlags creates command line flags for pprof variables. +func installFlags(flag plugin.FlagSet) flagsInstalled { + f := flagsInstalled{ + ints: make(map[string]*int), + bools: make(map[string]*bool), + floats: make(map[string]*float64), + strings: make(map[string]*string), + } + for n, v := range pprofVariables { + switch v.kind { + case boolKind: + if v.group != "" { + // Set all radio variables to false to identify conflicts. + f.bools[n] = flag.Bool(n, false, v.help) + } else { + f.bools[n] = flag.Bool(n, v.boolValue(), v.help) + } + case intKind: + f.ints[n] = flag.Int(n, v.intValue(), v.help) + case floatKind: + f.floats[n] = flag.Float64(n, v.floatValue(), v.help) + case stringKind: + f.strings[n] = flag.String(n, v.value, v.help) + } + } + return f +} + +// updateFlags updates the pprof variables according to the flags +// parsed in the command line. +func updateFlags(f flagsInstalled) error { + vars := pprofVariables + groups := map[string]string{} + for n, v := range f.bools { + vars.set(n, fmt.Sprint(*v)) + if *v { + g := vars[n].group + if g != "" && groups[g] != "" { + return fmt.Errorf("conflicting options %q and %q set", n, groups[g]) + } + groups[g] = n + } + } + for n, v := range f.ints { + vars.set(n, fmt.Sprint(*v)) + } + for n, v := range f.floats { + vars.set(n, fmt.Sprint(*v)) + } + for n, v := range f.strings { + vars.set(n, *v) + } + return nil +} + +type flagsInstalled struct { + ints map[string]*int + bools map[string]*bool + floats map[string]*float64 + strings map[string]*string +} + +// isBuildID determines if the profile may contain a build ID, by +// checking that it is a string of hex digits. 
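// For example (illustrative values): isBuildID("7f06ad9c4f1e") is true,
// while isBuildID("vmlinux-4.4") is false because it contains characters
// outside 0-9, a-f and A-F.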
+func isBuildID(id string) bool { + return strings.Trim(id, "0123456789abcdefABCDEF") == "" +} + +func sampleIndex(flag *bool, si string, sampleType, option string, ui plugin.UI) string { + if *flag { + if si == "" { + return sampleType + } + ui.PrintErr("Multiple value selections, ignoring ", option) + } + return si +} + +func outputFormat(bcmd map[string]*bool, acmd map[string]*string) (cmd []string, err error) { + for n, b := range bcmd { + if *b { + if cmd != nil { + return nil, fmt.Errorf("must set at most one output format") + } + cmd = []string{n} + } + } + for n, s := range acmd { + if *s != "" { + if cmd != nil { + return nil, fmt.Errorf("must set at most one output format") + } + cmd = []string{n, *s} + } + } + return cmd, nil +} + +var usageMsgHdr = "usage: pprof [options] [-base source] [binary] ...\n" + +var usageMsgSrc = "\n\n" + + " Source options:\n" + + " -seconds Duration for time-based profile collection\n" + + " -timeout Timeout in seconds for profile collection\n" + + " -buildid Override build id for main binary\n" + + " -base source Source of profile to use as baseline\n" + + " profile.pb.gz Profile in compressed protobuf format\n" + + " legacy_profile Profile in legacy pprof format\n" + + " http://host/profile URL for profile handler to retrieve\n" + + " -symbolize= Controls source of symbol information\n" + + " none Do not attempt symbolization\n" + + " local Examine only local binaries\n" + + " fastlocal Only get function names from local binaries\n" + + " remote Do not examine local binaries\n" + + " force Force re-symbolization\n" + + " Binary Local path or build id of binary for symbolization\n" + +var usageMsgVars = "\n\n" + + " Misc options:\n" + + " -tools Search path for object tools\n" + + "\n" + + " Environment Variables:\n" + + " PPROF_TMPDIR Location for temporary files (default $HOME/pprof)\n" + + " PPROF_TOOLS Search path for object-level tools\n" + + " PPROF_BINARY_PATH Search path for local binary files\n" + + " default: $HOME/pprof/binaries\n" + + " finds binaries by $name and $buildid/$name\n" diff --git a/src/internal/driver/commands.go b/src/internal/driver/commands.go new file mode 100644 index 00000000..dca56255 --- /dev/null +++ b/src/internal/driver/commands.go @@ -0,0 +1,580 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package driver + +import ( + "bytes" + "fmt" + "io" + "os" + "os/exec" + "runtime" + "sort" + "strconv" + "strings" + "time" + + "internal/plugin" + "internal/report" + "profile" + "svg" +) + +// commands describes the commands accepted by pprof. +type commands map[string]*command + +// command describes the actions for a pprof command. Includes a +// function for command-line completion, the report format to use +// during report generation, any postprocessing functions, and whether +// the command expects a regexp parameter (typically a function name). 
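// For instance, "top" below pairs report.Text with no post-processing and
// no parameter, while "weblist" pairs report.WebList with a regexp
// parameter and a post-processor that hands the generated HTML to a web
// browser.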
+type command struct { + format int // report format to generate + postProcess PostProcessor // postprocessing to run on report + hasParam bool // collect a parameter from the CLI + description string // single-line description text saying what the command does + usage string // multi-line help text saying how the command is used +} + +// help returns a help string for a command. +func (c *command) help(name string) string { + message := c.description + "\n" + if c.usage != "" { + message += " Usage:\n" + lines := strings.Split(c.usage, "\n") + for _, line := range lines { + message += fmt.Sprintf(" %s\n", line) + } + } + return message + "\n" +} + +func AddCommand(cmd string, format int, post PostProcessor, desc, usage string) { + pprofCommands[cmd] = &command{format, post, false, desc, usage} +} + +// PostProcessor is a function that applies post-processing to the report output +type PostProcessor func(input []byte, output io.Writer, ui plugin.UI) error + +// WaitForVisualizer makes pprof wait for visualizers to complete +// before continuing, returning any errors. +var waitForVisualizer = true + +// pprofCommands are the report generation commands recognized by pprof. +var pprofCommands = commands{ + // Commands that require no post-processing. + "tags": {report.Tags, nil, false, "Outputs all tags in the profile", "tags [tag_regex]* [-ignore_regex]* [>file]\nList tags with key:value matching tag_regex and exclude ignore_regex."}, + "raw": {report.Raw, nil, false, "Outputs a text representation of the raw profile", ""}, + "dot": {report.Dot, nil, false, "Outputs a graph in DOT format", reportHelp("dot", false, true)}, + "top": {report.Text, nil, false, "Outputs top entries in text form", reportHelp("top", true, true)}, + "tree": {report.Tree, nil, false, "Outputs a text rendering of call graph", reportHelp("tree", true, true)}, + "text": {report.Text, nil, false, "Outputs top entries in text form", reportHelp("text", true, true)}, + "traces": {report.Traces, nil, false, "Outputs all profile samples in text form", ""}, + "topproto": {report.TopProto, awayFromTTY("pb.gz"), false, "Outputs top entries in compressed protobuf format", ""}, + "disasm": {report.Dis, nil, true, "Output assembly listings annotated with samples", listHelp("disasm", true)}, + "list": {report.List, nil, true, "Output annotated source for functions matching regexp", listHelp("list", false)}, + "peek": {report.Tree, nil, true, "Output callers/callees of functions matching regexp", "peek func_regex\nDisplay callers and callees of functions matching func_regex."}, + + // Save binary formats to a file + "callgrind": {report.Callgrind, awayFromTTY("callgraph.out"), false, "Outputs a graph in callgrind format", reportHelp("callgrind", false, true)}, + "proto": {report.Proto, awayFromTTY("pb.gz"), false, "Outputs the profile in compressed protobuf format", ""}, + + // Generate report in DOT format and postprocess with dot + "gif": {report.Dot, invokeDot("gif"), false, "Outputs a graph image in GIF format", reportHelp("gif", false, true)}, + "pdf": {report.Dot, invokeDot("pdf"), false, "Outputs a graph in PDF format", reportHelp("pdf", false, true)}, + "png": {report.Dot, invokeDot("png"), false, "Outputs a graph image in PNG format", reportHelp("png", false, true)}, + "ps": {report.Dot, invokeDot("ps"), false, "Outputs a graph in PS format", reportHelp("ps", false, true)}, + + // Save SVG output into a file + "svg": {report.Dot, saveSVGToFile(), false, "Outputs a graph in SVG format", reportHelp("svg", false, true)}, + + // 
Visualize postprocessed dot output + "eog": {report.Dot, invokeVisualizer(invokeDot("svg"), "svg", []string{"eog"}), false, "Visualize graph through eog", reportHelp("eog", false, false)}, + "evince": {report.Dot, invokeVisualizer(invokeDot("pdf"), "pdf", []string{"evince"}), false, "Visualize graph through evince", reportHelp("evince", false, false)}, + "gv": {report.Dot, invokeVisualizer(invokeDot("ps"), "ps", []string{"gv --noantialias"}), false, "Visualize graph through gv", reportHelp("gv", false, false)}, + "web": {report.Dot, invokeVisualizer(saveSVGToFile(), "svg", browsers()), false, "Visualize graph through web browser", reportHelp("web", false, false)}, + + // Visualize callgrind output + "kcachegrind": {report.Callgrind, invokeVisualizer(nil, "grind", kcachegrind), false, "Visualize report in KCachegrind", reportHelp("kcachegrind", false, false)}, + + // Visualize HTML directly generated by report. + "weblist": {report.WebList, invokeVisualizer(awayFromTTY("html"), "html", browsers()), true, "Display annotated source in a web browser", listHelp("weblist", false)}, +} + +// pprofVariables are the configuration parameters that affect the +// reported generated by pprof. +var pprofVariables = variables{ + // Filename for file-based output formats, stdout by default. + "output": &variable{stringKind, "", "", helpText("Output filename for file-based outputs")}, + + // Comparisons. + "drop_negative": &variable{boolKind, "f", "", helpText( + "Ignore negative differences", + "Do not show any locations with values <0.")}, + + // Comparisons. + "positive_percentages": &variable{boolKind, "f", "", helpText( + "Ignore negative samples when computing percentages", + " Do not count negative samples when computing the total value", + " of the profile, used to compute percentages. If set, and the -base", + " option is used, percentages reported will be computed against the", + " main profile, ignoring the base profile.")}, + + // Graph handling options. + "call_tree": &variable{boolKind, "f", "", helpText( + "Create a context-sensitive call tree", + "Treat locations reached through different paths as separate.")}, + + // Display options. 
+ "relative_percentages": &variable{boolKind, "f", "", helpText( + "Show percentages relative to focused subgraph", + "If unset, percentages are relative to full graph before focusing", + "to facilitate comparison with original graph.")}, + "unit": &variable{stringKind, "minimum", "", helpText( + "Measurement units to display", + "Scale the sample values to this unit.", + " For time-based profiles, use seconds, milliseconds, nanoseconds, etc.", + " For memory profiles, use megabytes, kiloyes, bytes, etc.", + " auto will scale each value independently to the most natural unit.")}, + "compact_labels": &variable{boolKind, "f", "", "Show minimal headers"}, + + // Filtering options + "nodecount": &variable{intKind, "-1", "", helpText( + "Max number of nodes to show", + "Uses heuristics to limit the number of locations to be displayed.", + "On graphs, dotted edges represent paths through nodes that have been removed.")}, + "nodefraction": &variable{floatKind, "0.005", "", "Hide nodes below *total"}, + "edgefraction": &variable{floatKind, "0.001", "", "Hide edges below *total"}, + "trim": &variable{boolKind, "t", "", helpText( + "Honor nodefraction/edgefraction/nodecount defaults", + "Set to false to get the full profile, without any trimming.")}, + "focus": &variable{stringKind, "", "", helpText( + "Restricts to samples going through a node matching regexp", + "Discard samples that do not include a node matching this regexp.", + "Matching includes the function name, filename or object name.")}, + "ignore": &variable{stringKind, "", "", helpText( + "Skips paths going through any nodes matching regexp", + "If set, discard samples that include a node matching this regexp.", + "Matching includes the function name, filename or object name.")}, + "prune_from": &variable{stringKind, "", "", helpText( + "Drops any functions below the matched frame.", + "If set, any frames matching the specified regexp and any frames", + "below it will be dropped from each sample.")}, + "hide": &variable{stringKind, "", "", helpText( + "Skips nodes matching regexp", + "Discard nodes that match this location.", + "Other nodes from samples that include this location will be shown.", + "Matching includes the function name, filename or object name.")}, + "show": &variable{stringKind, "", "", helpText( + "Only show nodes matching regexp", + "If set, only show nodes that match this location.", + "Matching includes the function name, filename or object name.")}, + "tagfocus": &variable{stringKind, "", "", helpText( + "Restrict to samples with tags in range or matched by regexp", + "Discard samples that do not include a node with a tag matching this regexp.")}, + "tagignore": &variable{stringKind, "", "", helpText( + "Discard samples with tags in range or matched by regexp", + "Discard samples that do include a node with a tag matching this regexp.")}, + + // Heap profile options + "divide_by": &variable{floatKind, "1", "", helpText( + "Ratio to divide all samples before visualization", + "Divide all samples values by a constant, eg the number of processors or jobs.")}, + "mean": &variable{boolKind, "f", "", helpText( + "Average sample value over first value (count)", + "For memory profiles, report average memory per allocation.", + "For time-based profiles, report average time per event.")}, + "sample_index": &variable{stringKind, "", "", helpText( + "Sample value to report", + "Profiles contain multiple values per sample.", + "Use sample_value=index to select the ith value or select it by name.")}, + + // Data sorting criteria 
+ "flat": &variable{boolKind, "t", "cumulative", helpText("Sort entries based on own weight")}, + "cum": &variable{boolKind, "f", "cumulative", helpText("Sort entries based on cumulative weight")}, + + // Output granularity + "functions": &variable{boolKind, "t", "granularity", helpText( + "Aggregate at the function level.", + "Takes into account the filename/lineno where the function was defined.")}, + "functionnameonly": &variable{boolKind, "f", "granularity", helpText( + "Aggregate at the function level.", + "Ignores the filename/lineno where the function was defined.")}, + "files": &variable{boolKind, "f", "granularity", "Aggregate at the file level."}, + "lines": &variable{boolKind, "f", "granularity", "Aggregate at the source code line level."}, + "addresses": &variable{boolKind, "f", "granularity", helpText( + "Aggregate at the function level.", + "Includes functions' addresses in the output.")}, + "noinlines": &variable{boolKind, "f", "granularity", helpText( + "Aggregate at the function level.", + "Attributes inlined functions to their first out-of-line caller.")}, + "addressnoinlines": &variable{boolKind, "f", "granularity", helpText( + "Aggregate at the function level, including functions' addresses in the output.", + "Attributes inlined functions to their first out-of-line caller.")}, +} + +func helpText(s ...string) string { + return strings.Join(s, "\n") + "\n" +} + +// usage returns a string describing the pprof commands and variables. +// if commandLine is set, the output reflect cli usage. +func usage(commandLine bool) string { + var prefix string + if commandLine { + prefix = "-" + } + fmtHelp := func(c, d string) string { + return fmt.Sprintf(" %-16s %s", c, strings.SplitN(d, "\n", 2)[0]) + } + + var commands []string + for name, cmd := range pprofCommands { + commands = append(commands, fmtHelp(prefix+name, cmd.description)) + } + sort.Strings(commands) + + var help string + if commandLine { + help = " Output formats (select only one):\n" + } else { + help = " Commands:\n" + commands = append(commands, fmtHelp("quit/exit/^D", "Exit pprof")) + } + + help = help + strings.Join(commands, "\n") + "\n\n" + + " Options:\n" + + // Print help for variables after sorting them. + // Collect radio variables by their group name to print them together. 
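	// (With the variable table in this file, for example, "functions", "files",
	// "lines", "addresses", "noinlines", "addressnoinlines" and
	// "functionnameonly" all carry the group "granularity", so they are printed
	// together under a single option-group heading; "flat" and "cum" likewise
	// form the "cumulative" group.)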
+ radioOptions := make(map[string][]string) + var variables []string + for name, vr := range pprofVariables { + if vr.group != "" { + radioOptions[vr.group] = append(radioOptions[vr.group], name) + continue + } + variables = append(variables, fmtHelp(prefix+name, vr.help)) + } + sort.Strings(variables) + + help = help + strings.Join(variables, "\n") + "\n\n" + + " Option groups (only set one per group):\n" + + var radioStrings []string + for radio, ops := range radioOptions { + sort.Strings(ops) + s := []string{fmtHelp(radio, "")} + for _, op := range ops { + s = append(s, " "+fmtHelp(prefix+op, pprofVariables[op].help)) + } + + radioStrings = append(radioStrings, strings.Join(s, "\n")) + } + sort.Strings(radioStrings) + return help + strings.Join(radioStrings, "\n") +} + +func reportHelp(c string, cum, redirect bool) string { + h := []string{ + c + " [n] [focus_regex]* [-ignore_regex]*", + "Include up to n samples", + "Include samples matching focus_regex, and exclude ignore_regex.", + } + if cum { + h[0] += " [-cum]" + h = append(h, "-cum sorts the output by cumulative weight") + } + if redirect { + h[0] += " >f" + h = append(h, "Optionally save the report on the file f") + } + return strings.Join(h, "\n") +} + +func listHelp(c string, redirect bool) string { + h := []string{ + c + " [-focus_regex]* [-ignore_regex]*", + "Include functions matching func_regex, or including the address specified.", + "Include samples matching focus_regex, and exclude ignore_regex.", + } + if redirect { + h[0] += " >f" + h = append(h, "Optionally save the report on the file f") + } + return strings.Join(h, "\n") +} + +// browsers returns a list of commands to attempt for web visualization. +func browsers() []string { + cmds := []string{"chrome", "google-chrome", "firefox"} + switch runtime.GOOS { + case "darwin": + return append(cmds, "/usr/bin/open") + case "windows": + return append(cmds, "cmd /c start") + default: + user_browser := os.Getenv("BROWSER") + if user_browser != "" { + cmds = append([]string{user_browser, "sensible-browser"}, cmds...) + } else { + cmds = append([]string{"sensible-browser"}, cmds...) + } + return append(cmds, "xdg-open") + } +} + +var kcachegrind = []string{"kcachegrind"} + +// awayFromTTY saves the output in a file if it would otherwise go to +// the terminal screen. This is used to avoid dumping binary data on +// the screen. +func awayFromTTY(format string) PostProcessor { + return func(input []byte, output io.Writer, ui plugin.UI) error { + if output == os.Stdout && ui.IsTerminal() { + tempFile, err := newTempFile("", "profile", "."+format) + if err != nil { + return err + } + ui.PrintErr("Generating report in ", tempFile.Name()) + _, err = fmt.Fprint(tempFile, string(input)) + return err + } + _, err := fmt.Fprint(output, string(input)) + return err + } +} + +func invokeDot(format string) PostProcessor { + divert := awayFromTTY(format) + return func(input []byte, output io.Writer, ui plugin.UI) error { + cmd := exec.Command("dot", "-T"+format) + var buf bytes.Buffer + cmd.Stdin, cmd.Stdout, cmd.Stderr = bytes.NewBuffer(input), &buf, os.Stderr + if err := cmd.Run(); err != nil { + return fmt.Errorf("Failed to execute dot. Is Graphviz installed? 
Error: %v", err) + } + return divert(buf.Bytes(), output, ui) + } +} + +func saveSVGToFile() PostProcessor { + generateSVG := invokeDot("svg") + divert := awayFromTTY("svg") + return func(input []byte, output io.Writer, ui plugin.UI) error { + baseSVG := &bytes.Buffer{} + if err := generateSVG(input, baseSVG, ui); err != nil { + return err + } + + return divert([]byte(svg.Massage(*baseSVG)), output, ui) + } +} + +func invokeVisualizer(format PostProcessor, suffix string, visualizers []string) PostProcessor { + return func(input []byte, output io.Writer, ui plugin.UI) error { + if output != os.Stdout { + if format != nil { + return format(input, output, ui) + } + _, err := fmt.Fprint(output, string(input)) + return err + } + + tempFile, err := newTempFile(os.Getenv("PPROF_TMPDIR"), "pprof", "."+suffix) + if err != nil { + return err + } + deferDeleteTempFile(tempFile.Name()) + if format != nil { + if err := format(input, tempFile, ui); err != nil { + return err + } + } else { + if _, err := fmt.Fprint(tempFile, string(input)); err != nil { + return err + } + } + tempFile.Close() + // Try visualizers until one is successful + for _, v := range visualizers { + // Separate command and arguments for exec.Command. + args := strings.Split(v, " ") + if len(args) == 0 { + continue + } + viewer := exec.Command(args[0], append(args[1:], tempFile.Name())...) + viewer.Stderr = os.Stderr + if err = viewer.Start(); err == nil { + // Wait for a second so that the visualizer has a chance to + // open the input file. This needs to be done even if we're + // waiting for the visualizer as it can be just a wrapper that + // spawns a browser tab and returns right away. + defer func(t <-chan time.Time) { + <-t + }(time.After(time.Second)) + if waitForVisualizer { + return viewer.Wait() + } + return nil + } + } + return err + } +} + +// locateSampleIndex returns the appropriate index for a value of sample index. +// If numeric, it returns the number, otherwise it looks up the text in the +// profile sample types. +func locateSampleIndex(p *profile.Profile, sampleIndex string) (int, error) { + if sampleIndex == "" { + // By default select the last sample value + return len(p.SampleType) - 1, nil + } + if i, err := strconv.Atoi(sampleIndex); err == nil { + if i < 0 || i >= len(p.SampleType) { + return 0, fmt.Errorf("sample_index %s is outside the range [0..%d]", sampleIndex, len(p.SampleType)-1) + } + return i, nil + } + + // Remove the inuse_ prefix to support legacy pprof options + // "inuse_space" and "inuse_objects" for profiles containing types + // "space" and "objects". + noInuse := strings.TrimPrefix(sampleIndex, "inuse_") + sampleTypes := make([]string, len(p.SampleType)) + for i, t := range p.SampleType { + if t.Type == sampleIndex || t.Type == noInuse { + return i, nil + } + sampleTypes[i] = t.Type + } + + return 0, fmt.Errorf("sample_index %q must be one of: %v", sampleIndex, sampleTypes) +} + +// variables describe the configuration parameters recognized by pprof. +type variables map[string]*variable + +// variable is a single configuration parameter. +type variable struct { + kind int // How to interpret the value, must be one of the enums below. + value string // Effective value. Only values appropriate for the Kind should be set. + group string // boolKind variables with the same Group != "" cannot be set simultaneously. + help string // Text describing the variable, in multiple lines separated by newline. +} + +const ( + // variable.kind must be one of these variables. 
+ boolKind = iota + intKind + floatKind + stringKind +) + +// set updates the value of a variable, checking that the value is +// suitable for the variable Kind. +func (vars variables) set(name, value string) error { + v := vars[name] + if v == nil { + return fmt.Errorf("no variable %s", name) + } + var err error + switch v.kind { + case boolKind: + var b bool + if b, err = stringToBool(value); err == nil { + if v.group != "" && b == false { + err = fmt.Errorf("%q can only be set to true", name) + } + } + case intKind: + _, err = strconv.Atoi(value) + case floatKind: + _, err = strconv.ParseFloat(value, 64) + } + if err != nil { + return err + } + vars[name].value = value + if group := vars[name].group; group != "" { + for vname, vvar := range vars { + if vvar.group == group && vname != name { + vvar.value = "f" + } + } + } + return err +} + +// boolValue returns the value of a boolean variable. +func (v *variable) boolValue() bool { + b, err := stringToBool(v.value) + if err != nil { + panic("unexpected value " + v.value + " for bool ") + } + return b +} + +// intValue returns the value of an intKind variable. +func (v *variable) intValue() int { + i, err := strconv.Atoi(v.value) + if err != nil { + panic("unexpected value " + v.value + " for int ") + } + return i +} + +// floatValue returns the value of a Float variable. +func (v *variable) floatValue() float64 { + f, err := strconv.ParseFloat(v.value, 64) + if err != nil { + panic("unexpected value " + v.value + " for float ") + } + return f +} + +// stringValue returns a canonical representation for a variable. +func (v *variable) stringValue() string { + switch v.kind { + case boolKind: + return fmt.Sprint(v.boolValue()) + case intKind: + return fmt.Sprint(v.intValue()) + case floatKind: + return fmt.Sprint(v.floatValue()) + } + return v.value +} + +func stringToBool(s string) (bool, error) { + switch strings.ToLower(s) { + case "true", "t", "yes", "y", "1", "": + return true, nil + case "false", "f", "no", "n", "0": + return false, nil + default: + return false, fmt.Errorf(`illegal value "%s" for bool variable`, s) + } +} + +// makeCopy returns a duplicate of a set of shell variables. +func (vars variables) makeCopy() variables { + varscopy := make(variables, len(vars)) + for n, v := range vars { + vcopy := *v + varscopy[n] = &vcopy + } + return varscopy +} diff --git a/src/internal/driver/driver.go b/src/internal/driver/driver.go new file mode 100644 index 00000000..dd8bbbec --- /dev/null +++ b/src/internal/driver/driver.go @@ -0,0 +1,276 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package driver implements the core pprof functionality. It can be +// parameterized with a flag implementation, fetch and symbolize +// mechanisms. +package driver + +import ( + "bytes" + "fmt" + "io" + "os" + "path/filepath" + "regexp" + + "internal/plugin" + "profile" + "internal/report" +) + +// PProf acquires a profile, and symbolizes it using a profile +// manager. 
Then it generates a report formatted according to the +// options selected through the flags package. +func PProf(eo *plugin.Options) error { + // Remove any temporary files created during pprof processing. + defer cleanupTempFiles() + + o := setDefaults(eo) + + src, cmd, err := parseFlags(o) + if err != nil { + return err + } + + p, err := fetchProfiles(src, o) + if err != nil { + return err + } + + if cmd != nil { + return generateReport(p, cmd, pprofVariables, o) + } + + return interactive(p, o) +} + +func generateReport(p *profile.Profile, cmd []string, vars variables, o *plugin.Options) error { + p = p.Copy() // Prevent modification to the incoming profile. + + var w io.Writer + switch output := vars["output"].value; output { + case "": + w = os.Stdout + default: + o.UI.PrintErr("Generating report in ", output) + outputFile, err := o.Writer.Open(output) + if err != nil { + return err + } + defer outputFile.Close() + w = outputFile + } + + vars = applyCommandOverrides(cmd, vars) + + // Delay focus after configuring report to get percentages on all samples. + relative := vars["relative_percentages"].boolValue() + if relative { + if err := applyFocus(p, vars, o.UI); err != nil { + return err + } + } + ropt, err := reportOptions(p, vars) + if err != nil { + return err + } + c := pprofCommands[cmd[0]] + if c == nil { + panic("unexpected nil command") + } + ropt.OutputFormat = c.format + post := c.postProcess + if len(cmd) == 2 { + s, err := regexp.Compile(cmd[1]) + if err != nil { + return fmt.Errorf("parsing argument regexp %s: %v", cmd[1], err) + } + ropt.Symbol = s + } + + rpt := report.New(p, ropt) + if !relative { + if err := applyFocus(p, vars, o.UI); err != nil { + return err + } + } + + if err := aggregate(p, vars); err != nil { + return err + } + + if post == nil { + return report.Generate(w, rpt, o.Obj) + } + + // Capture output into buffer and send to postprocessing command. 
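	// (For the image commands this buffer holds DOT text; e.g. for "png" the
	// post processor is invokeDot("png"), which pipes it through the external
	// Graphviz dot tool. "proto" and "topproto" instead use awayFromTTY, which
	// only redirects the binary output into a file when it would otherwise be
	// dumped on the terminal.)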
+ buf := &bytes.Buffer{} + if err := report.Generate(buf, rpt, o.Obj); err != nil { + return err + } + return post(buf.Bytes(), w, o.UI) +} + +func applyCommandOverrides(cmd []string, v variables) variables { + trim, focus, hide := v["trim"].boolValue(), true, true + + switch cmd[0] { + case "proto", "raw": + trim, focus, hide = false, false, false + v.set("addresses", "t") + case "disasm", "weblist": + trim = false + v.set("addressnoinlines", "t") + case "peek": + trim, focus, hide = false, false, false + case "callgrind", "list": + v.set("nodecount", "0") + v.set("lines", "t") + case "text", "top", "topproto": + if v["nodecount"].intValue() == -1 { + v.set("nodecount", "0") + } + default: + if v["nodecount"].intValue() == -1 { + v.set("nodecount", "80") + } + } + if trim == false { + v.set("nodecount", "0") + v.set("nodefraction", "0") + v.set("edgefraction", "0") + } + if focus == false { + v.set("focus", "") + v.set("ignore", "") + v.set("tagfocus", "") + v.set("tagignore", "") + } + if hide == false { + v.set("hide", "") + v.set("show", "") + } + return v +} + +func aggregate(prof *profile.Profile, v variables) error { + var inlines, function, filename, linenumber, address bool + switch { + case v["addresses"].boolValue(): + return nil + case v["lines"].boolValue(): + inlines = true + function = true + filename = true + linenumber = true + case v["files"].boolValue(): + inlines = true + filename = true + case v["functions"].boolValue(): + inlines = true + function = true + filename = true + case v["noinlines"].boolValue(): + function = true + filename = true + case v["addressnoinlines"].boolValue(): + function = true + filename = true + linenumber = true + address = true + case v["functionnameonly"].boolValue(): + inlines = true + function = true + default: + return fmt.Errorf("unexpected granularity") + } + return prof.Aggregate(inlines, function, filename, linenumber, address) +} + +func reportOptions(p *profile.Profile, vars variables) (*report.Options, error) { + si, mean := vars["sample_index"].value, vars["mean"].boolValue() + value, sample, err := sampleFormat(p, si, mean) + if err != nil { + return nil, err + } + + stype := sample.Type + if mean { + stype = "mean_" + stype + } + + if vars["divide_by"].floatValue() == 0 { + return nil, fmt.Errorf("zero divisor specified") + } + + ropt := &report.Options{ + CumSort: vars["cum"].boolValue(), + CallTree: vars["call_tree"].boolValue(), + DropNegative: vars["drop_negative"].boolValue(), + PositivePercentages: vars["positive_percentages"].boolValue(), + + CompactLabels: vars["compact_labels"].boolValue(), + Ratio: 1 / vars["divide_by"].floatValue(), + + NodeCount: vars["nodecount"].intValue(), + NodeFraction: vars["nodefraction"].floatValue(), + EdgeFraction: vars["edgefraction"].floatValue(), + + SampleValue: value, + SampleType: stype, + SampleUnit: sample.Unit, + + OutputUnit: vars["unit"].value, + } + + if len(p.Mapping) > 0 { + ropt.Title = filepath.Base(p.Mapping[0].File) + } + + return ropt, nil +} + +type sampleValueFunc func([]int64) int64 + +// sampleFormat returns a function to extract values out of a profile.Sample, +// and the type/units of those values. 
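As a concrete illustration of applyCommandOverrides above, here is a hedged in-package sketch for the "disasm" command; the claims in the comments follow from the switch arm and the trim handling, and the regexp argument is a placeholder:

v := applyCommandOverrides([]string{"disasm", "foo.*"}, pprofVariables.makeCopy())
// v["addressnoinlines"].boolValue() == true (addresses kept, inlined frames folded into callers)
// v["nodecount"].intValue() == 0            (trimming disabled)
// v["nodefraction"].floatValue() == 0
// v["edgefraction"].floatValue() == 0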
+func sampleFormat(p *profile.Profile, sampleIndex string, mean bool) (sampleValueFunc, *profile.ValueType, error) { + if len(p.SampleType) == 0 { + return nil, nil, fmt.Errorf("profile has no samples") + } + index, err := locateSampleIndex(p, sampleIndex) + if err != nil { + return nil, nil, err + } + if mean { + return meanExtractor(index), p.SampleType[index], nil + } + return valueExtractor(index), p.SampleType[index], nil +} + +func valueExtractor(ix int) sampleValueFunc { + return func(v []int64) int64 { + return v[ix] + } +} + +func meanExtractor(ix int) sampleValueFunc { + return func(v []int64) int64 { + if v[0] == 0 { + return 0 + } + return v[ix] / v[0] + } +} diff --git a/src/internal/driver/driver_focus.go b/src/internal/driver/driver_focus.go new file mode 100644 index 00000000..6179efe6 --- /dev/null +++ b/src/internal/driver/driver_focus.go @@ -0,0 +1,168 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package driver + +import ( + "fmt" + "regexp" + "strconv" + "strings" + + "internal/measurement" + "internal/plugin" + "profile" +) + +var tagFilterRangeRx = regexp.MustCompile("([[:digit:]]+)([[:alpha:]]+)") + +// applyFocus filters samples based on the focus/ignore options +func applyFocus(prof *profile.Profile, v variables, ui plugin.UI) error { + focus, err := compileRegexOption("focus", v["focus"].value, nil) + ignore, err := compileRegexOption("ignore", v["ignore"].value, err) + hide, err := compileRegexOption("hide", v["hide"].value, err) + show, err := compileRegexOption("show", v["show"].value, err) + tagfocus, err := compileTagFilter("tagfocus", v["tagfocus"].value, ui, err) + tagignore, err := compileTagFilter("tagignore", v["tagignore"].value, ui, err) + prunefrom, err := compileRegexOption("prune_from", v["prune_from"].value, err) + if err != nil { + return err + } + + fm, im, hm, hnm := prof.FilterSamplesByName(focus, ignore, hide, show) + warnNoMatches(focus == nil || fm, "Focus", ui) + warnNoMatches(ignore == nil || im, "Ignore", ui) + warnNoMatches(hide == nil || hm, "Hide", ui) + warnNoMatches(show == nil || hnm, "Show", ui) + + tfm, tim := prof.FilterSamplesByTag(tagfocus, tagignore) + warnNoMatches(tagfocus == nil || tfm, "TagFocus", ui) + warnNoMatches(tagignore == nil || tim, "TagIgnore", ui) + + if prunefrom != nil { + prof.PruneFrom(prunefrom) + } + return nil +} + +func compileRegexOption(name, value string, err error) (*regexp.Regexp, error) { + if value == "" || err != nil { + return nil, err + } + rx, err := regexp.Compile(value) + if err != nil { + return nil, fmt.Errorf("parsing %s regexp: %v", name, err) + } + return rx, nil +} + +func compileTagFilter(name, value string, ui plugin.UI, err error) (func(*profile.Sample) bool, error) { + if value == "" || err != nil { + return nil, err + } + if numFilter := parseTagFilterRange(value); numFilter != nil { + ui.PrintErr(name, ":Interpreted '", value, "' as range, not regexp") + return func(s *profile.Sample) bool { + for key, vals := 
range s.NumLabel { + for _, val := range vals { + if numFilter(val, key) { + return true + } + } + } + return false + }, nil + } + var rfx []*regexp.Regexp + for _, tagf := range strings.Split(value, ",") { + fx, err := regexp.Compile(tagf) + if err != nil { + return nil, fmt.Errorf("parsing %s regexp: %v", name, err) + } + rfx = append(rfx, fx) + } + return func(s *profile.Sample) bool { + matchedrx: + for _, rx := range rfx { + for key, vals := range s.Label { + for _, val := range vals { + if rx.MatchString(key + ":" + val) { + continue matchedrx + } + } + } + return false + } + return true + }, nil +} + +// parseTagFilterRange returns a function to checks if a value is +// contained on the range described by a string. It can recognize +// strings of the form: +// "32kb" -- matches values == 32kb +// ":64kb" -- matches values <= 64kb +// "4mb:" -- matches values >= 4mb +// "12kb:64mb" -- matches values between 12kb and 64mb (both included). +func parseTagFilterRange(filter string) func(int64, string) bool { + ranges := tagFilterRangeRx.FindAllStringSubmatch(filter, 2) + if len(ranges) == 0 { + return nil // No ranges were identified + } + v, err := strconv.ParseInt(ranges[0][1], 10, 64) + if err != nil { + panic(fmt.Errorf("Failed to parse int %s: %v", ranges[0][1], err)) + } + scaledValue, unit := measurement.Scale(v, ranges[0][2], ranges[0][2]) + if len(ranges) == 1 { + switch match := ranges[0][0]; filter { + case match: + return func(v int64, u string) bool { + sv, su := measurement.Scale(v, u, unit) + return su == unit && sv == scaledValue + } + case match + ":": + return func(v int64, u string) bool { + sv, su := measurement.Scale(v, u, unit) + return su == unit && sv >= scaledValue + } + case ":" + match: + return func(v int64, u string) bool { + sv, su := measurement.Scale(v, u, unit) + return su == unit && sv <= scaledValue + } + } + return nil + } + if filter != ranges[0][0]+":"+ranges[1][0] { + return nil + } + if v, err = strconv.ParseInt(ranges[1][1], 10, 64); err != nil { + panic(fmt.Errorf("Failed to parse int %s: %v", ranges[1][1], err)) + } + scaledValue2, unit2 := measurement.Scale(v, ranges[1][2], unit) + if unit != unit2 { + return nil + } + return func(v int64, u string) bool { + sv, su := measurement.Scale(v, u, unit) + return su == unit && sv >= scaledValue && sv <= scaledValue2 + } +} + +func warnNoMatches(match bool, option string, ui plugin.UI) { + if !match { + ui.PrintErr(option + " expression matched no samples") + } +} diff --git a/src/internal/driver/driver_test.go b/src/internal/driver/driver_test.go new file mode 100644 index 00000000..3eb32b89 --- /dev/null +++ b/src/internal/driver/driver_test.go @@ -0,0 +1,1004 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
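Before the driver tests, a short illustration of the tagfocus/tagignore handling in driver_focus.go above: compileTagFilter first tries parseTagFilterRange and falls back to comma-separated regexps over key:value labels when the option does not look like a numeric range. The unit spellings below are assumptions, but mixed-unit ranges are exercised by the test table further down (e.g. tagfocus=1mb:2gb):

parseTagFilterRange("128kb")     // non-nil: matches values equal to 128kb
parseTagFilterRange("512kb:2mb") // non-nil: matches values between 512kb and 2mb, inclusive
parseTagFilterRange("worker.*")  // nil: no number+unit pair, so it is compiled as a regexp filter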
+ +package driver + +import ( + "bytes" + "fmt" + "io/ioutil" + "os" + "strconv" + "strings" + "testing" + "time" + + "internal/binutils" + "internal/plugin" + "profile" + "internal/proftest" + "internal/symbolz" +) + +func TestParse(t *testing.T) { + // Override weblist command to collect output in buffer + pprofCommands["weblist"].postProcess = nil + + // Update PATH to use fake dot for svg output + os.Setenv("PATH", "testdata/wrapper"+":"+os.Getenv("PATH")) + + testcase := []struct { + flags, source string + }{ + {"text,functions,flat", "cpu"}, + {"tree,addresses,flat,nodecount=4", "cpusmall"}, + {"text,functions,flat", "unknown"}, + {"text,alloc_objects,flat", "heap_alloc"}, + {"text,files,flat", "heap"}, + {"text,inuse_objects,flat", "heap"}, + {"text,lines,cum,hide=line[X3]0", "cpu"}, + {"text,lines,cum,show=[12]00", "cpu"}, + {"topproto,lines,cum,hide=mangled[X3]0", "cpu"}, + {"tree,lines,cum,focus=[24]00", "heap"}, + {"tree,relative_percentages,cum,focus=[24]00", "heap"}, + {"callgrind", "cpu"}, + {"callgrind", "heap"}, + {"dot,functions,flat", "cpu"}, + {"dot,lines,flat,focus=[12]00", "heap"}, + {"dot,addresses,flat,ignore=[X3]002,focus=[X1]000", "contention"}, + {"dot,files,cum", "contention"}, + {"svg", "cpu"}, + {"tags", "cpu"}, + {"tags,tagignore=tag[13],tagfocus=key[12]", "cpu"}, + {"traces", "cpu"}, + {"dot,alloc_space,flat,focus=[234]00", "heap_alloc"}, + {"dot,alloc_space,flat,hide=line.*1?23?", "heap_alloc"}, + {"dot,inuse_space,flat,tagfocus=1mb:2gb", "heap"}, + {"dot,inuse_space,flat,tagfocus=30kb:,tagignore=1mb:2mb", "heap"}, + {"disasm=line[13],addresses,flat", "cpu"}, + {"peek=line.*01", "cpu"}, + {"weblist=line[13],addresses,flat", "cpu"}, + } + + baseVars := pprofVariables + defer func() { pprofVariables = baseVars }() + for _, tc := range testcase { + // Reset the pprof variables before processing + pprofVariables = baseVars.makeCopy() + + f := baseFlags() + f.args = []string{tc.source} + + flags := strings.Split(tc.flags, ",") + + // Skip the output format in the first flag, to output to a proto + addFlags(&f, flags[1:]) + + // Encode profile into a protobuf and decode it again. + protoTempFile, err := ioutil.TempFile("", "profile_proto") + if err != nil { + t.Errorf("cannot create tempfile: %v", err) + } + defer protoTempFile.Close() + f.strings["output"] = protoTempFile.Name() + + if flags[0] == "topproto" { + f.bools["proto"] = false + f.bools["topproto"] = true + } + + // First pprof invocation to save the profile into a profile.proto. 
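+ // The test round-trips each profile: this invocation writes it out as a + // profile.proto, and the second invocation below reads that file back and + // renders the report that is compared against the golden file in testdata/.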
+ o1 := setDefaults(nil) + o1.Flagset = f + o1.Fetch = testFetcher{} + o1.Sym = testSymbolizer{} + if err := PProf(o1); err != nil { + t.Errorf("%s %q: %v", tc.source, tc.flags, err) + continue + } + // Reset the pprof variables after the proto invocation + pprofVariables = baseVars.makeCopy() + + // Read the profile from the encoded protobuf + outputTempFile, err := ioutil.TempFile("", "profile_output") + if err != nil { + t.Errorf("cannot create tempfile: %v", err) + } + defer outputTempFile.Close() + f.strings["output"] = outputTempFile.Name() + f.args = []string{protoTempFile.Name()} + + var solution string + // Apply the flags for the second pprof run, and identify name of + // the file containing expected results + if flags[0] == "topproto" { + solution = solutionFilename(tc.source, &f) + delete(f.bools, "topproto") + f.bools["text"] = true + } else { + delete(f.bools, "proto") + addFlags(&f, flags[:1]) + solution = solutionFilename(tc.source, &f) + } + + // Second pprof invocation to read the profile from profile.proto + // and generate a report. + o2 := setDefaults(nil) + o2.Flagset = f + o2.Sym = testSymbolizeDemangler{} + o2.Obj = new(binutils.Binutils) + + if err := PProf(o2); err != nil { + t.Errorf("%s: %v", tc.source, err) + } + b, err := ioutil.ReadFile(outputTempFile.Name()) + if err != nil { + t.Errorf("Failed to read profile %s: %v", outputTempFile.Name(), err) + } + + // Read data file with expected solution + solution = "testdata/" + solution + sbuf, err := ioutil.ReadFile(solution) + if err != nil { + t.Errorf("reading solution file %s: %v", solution, err) + continue + } + + if flags[0] == "svg" { + b = removeScripts(b) + sbuf = removeScripts(sbuf) + } + + if string(b) != string(sbuf) { + t.Errorf("diff %s %s", solution, tc.source) + d, err := proftest.Diff(sbuf, b) + if err != nil { + t.Fatalf("diff %s %v", solution, err) + } + t.Errorf("%s\n%s\n", solution, d) + } + } +} + +// removeScripts removes <script> ... </script> pairs from its input +func removeScripts(in []byte) []byte { + beginMarker := []byte("<script") + endMarker := []byte("</script>") + + if begin := bytes.Index(in, beginMarker); begin > 0 { + if end := bytes.Index(in[begin:], endMarker); end > 0 { + in = append(in[:begin], removeScripts(in[begin+end+len(endMarker):])...)
+ } + } + return in +} + +// addFlags parses flag descriptions and adds them to the testFlags +func addFlags(f *testFlags, flags []string) { + for _, flag := range flags { + fields := strings.SplitN(flag, "=", 2) + switch len(fields) { + case 1: + f.bools[fields[0]] = true + case 2: + if i, err := strconv.Atoi(fields[1]); err == nil { + f.ints[fields[0]] = i + } else { + f.strings[fields[0]] = fields[1] + } + } + } +} + +// solutionFilename returns the name of the solution file for the test +func solutionFilename(source string, f *testFlags) string { + name := []string{"pprof", strings.TrimPrefix(source, "http://host:8000/")} + name = addString(name, f, []string{"flat", "cum"}) + name = addString(name, f, []string{"functions", "files", "lines", "addresses"}) + name = addString(name, f, []string{"inuse_space", "inuse_objects", "alloc_space", "alloc_objects"}) + name = addString(name, f, []string{"relative_percentages"}) + name = addString(name, f, []string{"seconds"}) + name = addString(name, f, []string{"text", "tree", "callgrind", "dot", "svg", "tags", "dot", "traces", "disasm", "peek", "weblist", "topproto"}) + if f.strings["focus"] != "" || f.strings["tagfocus"] != "" { + name = append(name, "focus") + } + if f.strings["ignore"] != "" || f.strings["tagignore"] != "" { + name = append(name, "ignore") + } + name = addString(name, f, []string{"hide", "show"}) + + return strings.Join(name, ".") +} + +func addString(name []string, f *testFlags, components []string) []string { + for _, c := range components { + if f.bools[c] || f.strings[c] != "" || f.ints[c] != 0 { + return append(name, c) + } + } + return name +} + +// testFlags implements the plugin.FlagSet interface. +type testFlags struct { + bools map[string]bool + ints map[string]int + floats map[string]float64 + strings map[string]string + args []string +} + +func (testFlags) ExtraUsage() string { return "" } + +func (f testFlags) Bool(s string, d bool, c string) *bool { + if b, ok := f.bools[s]; ok { + return &b + } + return &d +} + +func (f testFlags) Int(s string, d int, c string) *int { + if i, ok := f.ints[s]; ok { + return &i + } + return &d +} + +func (f testFlags) Float64(s string, d float64, c string) *float64 { + if g, ok := f.floats[s]; ok { + return &g + } + return &d +} + +func (f testFlags) String(s, d, c string) *string { + if t, ok := f.strings[s]; ok { + return &t + } + return &d +} + +func (f testFlags) BoolVar(p *bool, s string, d bool, c string) { + if b, ok := f.bools[s]; ok { + *p = b + } else { + *p = d + } +} + +func (f testFlags) IntVar(p *int, s string, d int, c string) { + if i, ok := f.ints[s]; ok { + *p = i + } else { + *p = d + } +} + +func (f testFlags) Float64Var(p *float64, s string, d float64, c string) { + if g, ok := f.floats[s]; ok { + *p = g + } else { + *p = d + } +} + +func (f testFlags) StringVar(p *string, s, d, c string) { + if t, ok := f.strings[s]; ok { + *p = t + } else { + *p = d + } +} + +func (f testFlags) StringList(s, d, c string) *[]*string { + return &[]*string{} +} + +func (f testFlags) Parse(func()) []string { + return f.args +} + +func baseFlags() testFlags { + return testFlags{ + bools: map[string]bool{ + "proto": true, + "trim": true, + "compact_labels": true, + }, + ints: map[string]int{ + "nodecount": 20, + }, + floats: map[string]float64{ + "nodefraction": 0.05, + "edgefraction": 0.01, + "divide_by": 1.0, + }, + strings: map[string]string{ + "tools": "testdata/wrapper", + "unit": "minimum", + }, + } +} + +type testProfile struct { +} + +const testStart = 0x1000 +const 
testOffset = 0x5000 + +type testFetcher struct{} + +func (testFetcher) Fetch(s string, d, t time.Duration) (*profile.Profile, string, error) { + var p *profile.Profile + s = strings.TrimPrefix(s, "http://host:8000/") + switch s { + case "cpu", "unknown": + p = cpuProfile() + case "cpusmall": + p = cpuProfileSmall() + case "heap": + p = heapProfile() + case "heap_alloc": + p = heapProfile() + p.SampleType = []*profile.ValueType{ + {Type: "alloc_objects", Unit: "count"}, + {Type: "alloc_space", Unit: "bytes"}, + } + case "contention": + p = contentionProfile() + case "symbolz": + p = symzProfile() + case "http://host2/symbolz": + p = symzProfile() + p.Mapping[0].Start += testOffset + p.Mapping[0].Limit += testOffset + for i := range p.Location { + p.Location[i].Address += testOffset + } + default: + return nil, "", fmt.Errorf("unexpected source: %s", s) + } + return p, s, nil +} + +type testSymbolizer struct{} + +func (testSymbolizer) Symbolize(_ string, _ plugin.MappingSources, _ *profile.Profile) error { + return nil +} + +type testSymbolizeDemangler struct{} + +func (testSymbolizeDemangler) Symbolize(_ string, _ plugin.MappingSources, p *profile.Profile) error { + for _, fn := range p.Function { + if fn.Name == "" || fn.SystemName == fn.Name { + fn.Name = fakeDemangler(fn.SystemName) + } + } + return nil +} + +func testFetchSymbols(source, post string) ([]byte, error) { + var buf bytes.Buffer + + if source == "http://host2/symbolz" { + for _, address := range strings.Split(post, "+") { + a, _ := strconv.ParseInt(address, 0, 64) + fmt.Fprintf(&buf, "%v\t", address) + if a-testStart < testOffset { + fmt.Fprintf(&buf, "wrong_source_%v_", address) + continue + } + fmt.Fprintf(&buf, "%#x\n", a-testStart-testOffset) + } + return buf.Bytes(), nil + } + for _, address := range strings.Split(post, "+") { + a, _ := strconv.ParseInt(address, 0, 64) + fmt.Fprintf(&buf, "%v\t", address) + if a-testStart > testOffset { + fmt.Fprintf(&buf, "wrong_source_%v_", address) + continue + } + fmt.Fprintf(&buf, "%#x\n", a-testStart) + } + return buf.Bytes(), nil +} + +type testSymbolzSymbolizer struct{} + +func (testSymbolzSymbolizer) Symbolize(variables string, sources plugin.MappingSources, p *profile.Profile) error { + return symbolz.Symbolize(sources, testFetchSymbols, p, nil) +} + +func fakeDemangler(name string) string { + switch name { + case "mangled1000": + return "line1000" + case "mangled2000": + return "line2000" + case "mangled2001": + return "line2001" + case "mangled3000": + return "line3000" + case "mangled3001": + return "line3001" + case "mangled3002": + return "line3002" + case "mangledNEW": + return "operator new" + case "mangledMALLOC": + return "malloc" + default: + return name + } +} + +func cpuProfile() *profile.Profile { + var cpuM = []*profile.Mapping{ + { + ID: 1, + Start: 0x1000, + Limit: 0x4000, + File: "testbinary", + HasFunctions: true, + HasFilenames: true, + HasLineNumbers: true, + HasInlineFrames: true, + }, + } + + var cpuF = []*profile.Function{ + {ID: 1, Name: "mangled1000", SystemName: "mangled1000", Filename: "testdata/file1000.src"}, + {ID: 2, Name: "mangled2000", SystemName: "mangled2000", Filename: "testdata/file2000.src"}, + {ID: 3, Name: "mangled2001", SystemName: "mangled2001", Filename: "testdata/file2000.src"}, + {ID: 4, Name: "mangled3000", SystemName: "mangled3000", Filename: "testdata/file3000.src"}, + {ID: 5, Name: "mangled3001", SystemName: "mangled3001", Filename: "testdata/file3000.src"}, + {ID: 6, Name: "mangled3002", SystemName: "mangled3002", Filename: 
"testdata/file3000.src"}, + } + + var cpuL = []*profile.Location{ + { + ID: 1000, + Mapping: cpuM[0], + Address: 0x1000, + Line: []profile.Line{ + {Function: cpuF[0], Line: 1}, + }, + }, + { + ID: 2000, + Mapping: cpuM[0], + Address: 0x2000, + Line: []profile.Line{ + {Function: cpuF[2], Line: 9}, + {Function: cpuF[1], Line: 4}, + }, + }, + { + ID: 3000, + Mapping: cpuM[0], + Address: 0x3000, + Line: []profile.Line{ + {Function: cpuF[5], Line: 2}, + {Function: cpuF[4], Line: 5}, + {Function: cpuF[3], Line: 6}, + }, + }, + { + ID: 3001, + Mapping: cpuM[0], + Address: 0x3001, + Line: []profile.Line{ + {Function: cpuF[4], Line: 8}, + {Function: cpuF[3], Line: 9}, + }, + }, + { + ID: 3002, + Mapping: cpuM[0], + Address: 0x3002, + Line: []profile.Line{ + {Function: cpuF[5], Line: 5}, + {Function: cpuF[3], Line: 7}, + }, + }, + } + + return &profile.Profile{ + PeriodType: &profile.ValueType{Type: "cpu", Unit: "milliseconds"}, + Period: 1, + DurationNanos: 10e9, + SampleType: []*profile.ValueType{ + {Type: "samples", Unit: "count"}, + {Type: "cpu", Unit: "milliseconds"}, + }, + Sample: []*profile.Sample{ + { + Location: []*profile.Location{cpuL[0], cpuL[1], cpuL[2]}, + Value: []int64{1000, 1000}, + Label: map[string][]string{ + "key1": []string{"tag1"}, + "key2": []string{"tag1"}, + }, + }, + { + Location: []*profile.Location{cpuL[0], cpuL[3]}, + Value: []int64{100, 100}, + Label: map[string][]string{ + "key1": []string{"tag2"}, + "key3": []string{"tag2"}, + }, + }, + { + Location: []*profile.Location{cpuL[1], cpuL[4]}, + Value: []int64{10, 10}, + Label: map[string][]string{ + "key1": []string{"tag3"}, + "key2": []string{"tag2"}, + }, + }, + { + Location: []*profile.Location{cpuL[2]}, + Value: []int64{10, 10}, + Label: map[string][]string{ + "key1": []string{"tag4"}, + "key2": []string{"tag1"}, + }, + }, + }, + Location: cpuL, + Function: cpuF, + Mapping: cpuM, + } +} + +func cpuProfileSmall() *profile.Profile { + var cpuM = []*profile.Mapping{ + { + ID: 1, + Start: 0x1000, + Limit: 0x4000, + File: "testbinary", + HasFunctions: true, + HasFilenames: true, + HasLineNumbers: true, + HasInlineFrames: true, + }, + } + + var cpuL = []*profile.Location{ + { + ID: 1000, + Mapping: cpuM[0], + Address: 0x1000, + }, + { + ID: 2000, + Mapping: cpuM[0], + Address: 0x2000, + }, + { + ID: 3000, + Mapping: cpuM[0], + Address: 0x3000, + }, + { + ID: 4000, + Mapping: cpuM[0], + Address: 0x4000, + }, + { + ID: 5000, + Mapping: cpuM[0], + Address: 0x5000, + }, + } + + return &profile.Profile{ + PeriodType: &profile.ValueType{Type: "cpu", Unit: "milliseconds"}, + Period: 1, + DurationNanos: 10e9, + SampleType: []*profile.ValueType{ + {Type: "samples", Unit: "count"}, + {Type: "cpu", Unit: "milliseconds"}, + }, + Sample: []*profile.Sample{ + { + Location: []*profile.Location{cpuL[0], cpuL[1], cpuL[2]}, + Value: []int64{1000, 1000}, + }, + { + Location: []*profile.Location{cpuL[3], cpuL[1], cpuL[4]}, + Value: []int64{1000, 1000}, + }, + { + Location: []*profile.Location{cpuL[2]}, + Value: []int64{1000, 1000}, + }, + { + Location: []*profile.Location{cpuL[4]}, + Value: []int64{1000, 1000}, + }, + }, + Location: cpuL, + Function: nil, + Mapping: cpuM, + } +} + +func heapProfile() *profile.Profile { + var heapM = []*profile.Mapping{ + { + ID: 1, + BuildID: "buildid", + Start: 0x1000, + Limit: 0x4000, + HasFunctions: true, + HasFilenames: true, + HasLineNumbers: true, + HasInlineFrames: true, + }, + } + + var heapF = []*profile.Function{ + {ID: 1, Name: "pruneme", SystemName: "pruneme", Filename: "prune.h"}, + {ID: 2, 
Name: "mangled1000", SystemName: "mangled1000", Filename: "testdata/file1000.src"}, + {ID: 3, Name: "mangled2000", SystemName: "mangled2000", Filename: "testdata/file2000.src"}, + {ID: 4, Name: "mangled2001", SystemName: "mangled2001", Filename: "testdata/file2000.src"}, + {ID: 5, Name: "mangled3000", SystemName: "mangled3000", Filename: "testdata/file3000.src"}, + {ID: 6, Name: "mangled3001", SystemName: "mangled3001", Filename: "testdata/file3000.src"}, + {ID: 7, Name: "mangled3002", SystemName: "mangled3002", Filename: "testdata/file3000.src"}, + {ID: 8, Name: "mangledMALLOC", SystemName: "mangledMALLOC", Filename: "malloc.h"}, + {ID: 9, Name: "mangledNEW", SystemName: "mangledNEW", Filename: "new.h"}, + } + + var heapL = []*profile.Location{ + { + ID: 1000, + Mapping: heapM[0], + Address: 0x1000, + Line: []profile.Line{ + {Function: heapF[0], Line: 100}, + {Function: heapF[7], Line: 100}, + {Function: heapF[1], Line: 1}, + }, + }, + { + ID: 2000, + Mapping: heapM[0], + Address: 0x2000, + Line: []profile.Line{ + {Function: heapF[8], Line: 100}, + {Function: heapF[3], Line: 2}, + {Function: heapF[2], Line: 3}, + }, + }, + { + ID: 3000, + Mapping: heapM[0], + Address: 0x3000, + Line: []profile.Line{ + {Function: heapF[8], Line: 100}, + {Function: heapF[6], Line: 3}, + {Function: heapF[5], Line: 2}, + {Function: heapF[4], Line: 4}, + }, + }, + { + ID: 3001, + Mapping: heapM[0], + Address: 0x3001, + Line: []profile.Line{ + {Function: heapF[0], Line: 100}, + {Function: heapF[8], Line: 100}, + {Function: heapF[5], Line: 2}, + {Function: heapF[4], Line: 4}, + }, + }, + { + ID: 3002, + Mapping: heapM[0], + Address: 0x3002, + Line: []profile.Line{ + {Function: heapF[6], Line: 3}, + {Function: heapF[4], Line: 4}, + }, + }, + } + + return &profile.Profile{ + PeriodType: &profile.ValueType{Type: "allocations", Unit: "bytes"}, + Period: 524288, + SampleType: []*profile.ValueType{ + {Type: "inuse_objects", Unit: "count"}, + {Type: "inuse_space", Unit: "bytes"}, + }, + Sample: []*profile.Sample{ + { + Location: []*profile.Location{heapL[0], heapL[1], heapL[2]}, + Value: []int64{10, 1024000}, + NumLabel: map[string][]int64{ + "bytes": []int64{102400}, + }, + }, + { + Location: []*profile.Location{heapL[0], heapL[3]}, + Value: []int64{20, 4096000}, + NumLabel: map[string][]int64{ + "bytes": []int64{204800}, + }, + }, + { + Location: []*profile.Location{heapL[1], heapL[4]}, + Value: []int64{40, 65536000}, + NumLabel: map[string][]int64{ + "bytes": []int64{1638400}, + }, + }, + { + Location: []*profile.Location{heapL[2]}, + Value: []int64{80, 32768000}, + NumLabel: map[string][]int64{ + "bytes": []int64{409600}, + }, + }, + }, + DropFrames: ".*operator new.*|malloc", + Location: heapL, + Function: heapF, + Mapping: heapM, + } +} + +func contentionProfile() *profile.Profile { + var contentionM = []*profile.Mapping{ + { + ID: 1, + BuildID: "buildid-contention", + Start: 0x1000, + Limit: 0x4000, + HasFunctions: true, + HasFilenames: true, + HasLineNumbers: true, + HasInlineFrames: true, + }, + } + + var contentionF = []*profile.Function{ + {ID: 1, Name: "mangled1000", SystemName: "mangled1000", Filename: "testdata/file1000.src"}, + {ID: 2, Name: "mangled2000", SystemName: "mangled2000", Filename: "testdata/file2000.src"}, + {ID: 3, Name: "mangled2001", SystemName: "mangled2001", Filename: "testdata/file2000.src"}, + {ID: 4, Name: "mangled3000", SystemName: "mangled3000", Filename: "testdata/file3000.src"}, + {ID: 5, Name: "mangled3001", SystemName: "mangled3001", Filename: "testdata/file3000.src"}, + {ID: 6, 
Name: "mangled3002", SystemName: "mangled3002", Filename: "testdata/file3000.src"}, + } + + var contentionL = []*profile.Location{ + { + ID: 1000, + Mapping: contentionM[0], + Address: 0x1000, + Line: []profile.Line{ + {Function: contentionF[0], Line: 1}, + }, + }, + { + ID: 2000, + Mapping: contentionM[0], + Address: 0x2000, + Line: []profile.Line{ + {Function: contentionF[2], Line: 2}, + {Function: contentionF[1], Line: 3}, + }, + }, + { + ID: 3000, + Mapping: contentionM[0], + Address: 0x3000, + Line: []profile.Line{ + {Function: contentionF[5], Line: 2}, + {Function: contentionF[4], Line: 3}, + {Function: contentionF[3], Line: 5}, + }, + }, + { + ID: 3001, + Mapping: contentionM[0], + Address: 0x3001, + Line: []profile.Line{ + {Function: contentionF[4], Line: 3}, + {Function: contentionF[3], Line: 5}, + }, + }, + { + ID: 3002, + Mapping: contentionM[0], + Address: 0x3002, + Line: []profile.Line{ + {Function: contentionF[5], Line: 4}, + {Function: contentionF[3], Line: 3}, + }, + }, + } + + return &profile.Profile{ + PeriodType: &profile.ValueType{Type: "contentions", Unit: "count"}, + Period: 524288, + SampleType: []*profile.ValueType{ + {Type: "contentions", Unit: "count"}, + {Type: "delay", Unit: "nanoseconds"}, + }, + Sample: []*profile.Sample{ + { + Location: []*profile.Location{contentionL[0], contentionL[1], contentionL[2]}, + Value: []int64{10, 10240000}, + }, + { + Location: []*profile.Location{contentionL[0], contentionL[3]}, + Value: []int64{20, 40960000}, + }, + { + Location: []*profile.Location{contentionL[1], contentionL[4]}, + Value: []int64{40, 65536000}, + }, + { + Location: []*profile.Location{contentionL[2]}, + Value: []int64{80, 32768000}, + }, + }, + Location: contentionL, + Function: contentionF, + Mapping: contentionM, + Comments: []string{"Comment #1", "Comment #2"}, + } +} + +func symzProfile() *profile.Profile { + var symzM = []*profile.Mapping{ + { + ID: 1, + Start: testStart, + Limit: 0x4000, + File: "testbinary", + }, + } + + var symzL = []*profile.Location{ + {ID: 1, Mapping: symzM[0], Address: testStart}, + {ID: 2, Mapping: symzM[0], Address: testStart + 0x1000}, + {ID: 3, Mapping: symzM[0], Address: testStart + 0x2000}, + } + + return &profile.Profile{ + PeriodType: &profile.ValueType{Type: "cpu", Unit: "milliseconds"}, + Period: 1, + DurationNanos: 10e9, + SampleType: []*profile.ValueType{ + {Type: "samples", Unit: "count"}, + {Type: "cpu", Unit: "milliseconds"}, + }, + Sample: []*profile.Sample{ + { + Location: []*profile.Location{symzL[0], symzL[1], symzL[2]}, + Value: []int64{1, 1}, + }, + }, + Location: symzL, + Mapping: symzM, + } +} + +var autoCompleteTests = []struct { + in string + out string +}{ + {"", ""}, + {"xyz", "xyz"}, // no match + {"dis", "disasm"}, // single match + {"t", "t"}, // many matches + {"top abc", "top abc"}, // no function name match + {"top mangledM", "top mangledMALLOC"}, // single function name match + {"top cmd cmd mangledM", "top cmd cmd mangledMALLOC"}, + {"top mangled", "top mangled"}, // many function name matches + {"cmd mangledM", "cmd mangledM"}, // invalid command + {"top mangledM cmd", "top mangledM cmd"}, // cursor misplaced + {"top edMA", "top mangledMALLOC"}, // single infix function name match + {"top -mangledM", "top -mangledMALLOC"}, // ignore sign handled + {"lin", "lines"}, // single variable match + {"EdGeF", "edgefraction"}, // single capitalized match + {"help dis", "help disasm"}, // help command match + {"help relative_perc", "help relative_percentages"}, // help variable match + {"help coMpa", "help 
compact_labels"}, // help variable capitalized match +} + +func TestAutoComplete(t *testing.T) { + complete := newCompleter(functionNames(heapProfile())) + + for _, test := range autoCompleteTests { + if out := complete(test.in); out != test.out { + t.Errorf("autoComplete(%s) = %s; want %s", test.in, out, test.out) + } + } +} + +func TestTagFilter(t *testing.T) { + var tagFilterTests = []struct { + name, value string + tags map[string][]string + want bool + }{ + {"test1", "tag2", map[string][]string{"value1": {"tag1", "tag2"}}, true}, + {"test2", "tag3", map[string][]string{"value1": {"tag1", "tag2"}}, false}, + {"test3", "tag1,tag3", map[string][]string{"value1": {"tag1", "tag2"}, "value2": {"tag3"}}, true}, + {"test4", "t..[12],t..3", map[string][]string{"value1": {"tag1", "tag2"}, "value2": {"tag3"}}, true}, + {"test5", "tag2,tag3", map[string][]string{"value1": {"tag1", "tag2"}}, false}, + } + + for _, test := range tagFilterTests { + filter, err := compileTagFilter(test.name, test.value, &proftest.TestUI{T: t}, nil) + if err != nil { + t.Errorf("tagFilter %s:%v", test.name, err) + continue + } + s := profile.Sample{ + Label: test.tags, + } + + if got := filter(&s); got != test.want { + t.Errorf("tagFilter %s: got %v, want %v", test.name, got, test.want) + } + } +} + +func TestSymbolzAfterMerge(t *testing.T) { + baseVars := pprofVariables + pprofVariables = baseVars.makeCopy() + defer func() { pprofVariables = baseVars }() + + f := baseFlags() + f.args = []string{"symbolz", "http://host2/symbolz"} + + o := setDefaults(nil) + o.Flagset = f + o.Obj = new(binutils.Binutils) + src, cmd, err := parseFlags(o) + if err != nil { + t.Fatalf("parseFlags: %v", err) + } + + if len(cmd) != 1 || cmd[0] != "proto" { + t.Fatalf("parseFlags returned command %v, want [proto]", cmd) + } + + o.Fetch = testFetcher{} + o.Sym = testSymbolzSymbolizer{} + p, err := fetchProfiles(src, o) + if err != nil { + t.Fatalf("fetchProfiles: %v", err) + } + if len(p.Location) != 3 { + t.Errorf("Got %d locations after merge, want %d", len(p.Location), 3) + } + for i, l := range p.Location { + if len(l.Line) != 1 { + t.Errorf("Number of lines for symbolz %#x in iteration %d, got %d, want %d", l.Address, i, len(l.Line), 1) + continue + } + address := l.Address - l.Mapping.Start + if got, want := l.Line[0].Function.Name, fmt.Sprintf("%#x", address); got != want { + t.Errorf("symbolz %#x, got %s, want %s", address, got, want) + } + } +} diff --git a/src/internal/driver/fetch.go b/src/internal/driver/fetch.go new file mode 100644 index 00000000..1a2113d3 --- /dev/null +++ b/src/internal/driver/fetch.go @@ -0,0 +1,367 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package driver + +import ( + "fmt" + "io" + "net/http" + "net/url" + "os" + "path/filepath" + "strconv" + "sync" + "time" + + "internal/measurement" + "internal/plugin" + "profile" +) + +// fetchProfiles fetches and symbolizes the profiles specified by s. 
+// It will merge all the profiles it is able to retrieve, even if +// there are some failures. It will return an error if it is unable to +// fetch any profiles. +func fetchProfiles(s *source, o *plugin.Options) (*profile.Profile, error) { + if err := setTmpDir(o.UI); err != nil { + return nil, err + } + + p, msrcs, save, err := concurrentGrab(s, o.Fetch, o.Obj, o.UI) + if err != nil { + return nil, err + } + + // Symbolize the merged profile. + if err := o.Sym.Symbolize(s.Symbolize, msrcs, p); err != nil { + return nil, err + } + p.RemoveUninteresting() + + // Save a copy of the merged profile if there is at least one remote source. + if save { + prefix := "pprof." + if len(p.Mapping) > 0 && p.Mapping[0].File != "" { + prefix += filepath.Base(p.Mapping[0].File) + "." + } + for _, s := range p.SampleType { + prefix += s.Type + "." + } + + dir := os.Getenv("PPROF_TMPDIR") + tempFile, err := newTempFile(dir, prefix, ".pb.gz") + if err == nil { + if err = p.Write(tempFile); err == nil { + o.UI.PrintErr("Saved profile in ", tempFile.Name()) + } + } + if err != nil { + o.UI.PrintErr("Could not save profile: ", err) + } + } + + if err := p.CheckValid(); err != nil { + return nil, err + } + + return p, nil +} + +// concurrentGrab fetches multiple profiles concurrently +func concurrentGrab(s *source, fetch plugin.Fetcher, obj plugin.ObjTool, ui plugin.UI) (*profile.Profile, plugin.MappingSources, bool, error) { + wg := sync.WaitGroup{} + numprofs := len(s.Sources) + len(s.Base) + profs := make([]*profile.Profile, numprofs) + msrcs := make([]plugin.MappingSources, numprofs) + remote := make([]bool, numprofs) + errs := make([]error, numprofs) + for i, source := range s.Sources { + wg.Add(1) + go func(i int, src string) { + defer wg.Done() + profs[i], msrcs[i], remote[i], errs[i] = grabProfile(s, src, 1, fetch, obj, ui) + }(i, source) + } + for i, source := range s.Base { + wg.Add(1) + go func(i int, src string) { + defer wg.Done() + profs[i], msrcs[i], remote[i], errs[i] = grabProfile(s, src, -1, fetch, obj, ui) + }(i+len(s.Sources), source) + } + wg.Wait() + var save bool + var numFailed = 0 + for i, src := range s.Sources { + if errs[i] != nil { + ui.PrintErr(src + ": " + errs[i].Error()) + numFailed++ + } + save = save || remote[i] + } + for i, src := range s.Base { + b := i + len(s.Sources) + if errs[b] != nil { + ui.PrintErr(src + ": " + errs[b].Error()) + numFailed++ + } + save = save || remote[b] + } + if numFailed == numprofs { + return nil, nil, false, fmt.Errorf("failed to fetch any profiles") + } + if numFailed > 0 { + ui.PrintErr(fmt.Sprintf("fetched %d profiles out of %d", numprofs-numFailed, numprofs)) + } + + scaled := make([]*profile.Profile, 0, numprofs) + for _, p := range profs { + if p != nil { + scaled = append(scaled, p) + } + } + + // Merge profiles. + if err := measurement.ScaleProfiles(scaled); err != nil { + return nil, nil, false, err + } + + p, err := profile.Merge(scaled) + if err != nil { + return nil, nil, false, err + } + + // Combine mapping sources. + msrc := make(plugin.MappingSources) + for _, ms := range msrcs { + for m, s := range ms { + msrc[m] = append(msrc[m], s...) + } + } + + return p, msrc, save, nil +} + +// setTmpDir sets the PPROF_TMPDIR environment variable with a new +// temp directory, if not already set. 
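+// Profiles fetched from remote sources are saved under this directory by +// fetchProfiles above; $HOME/pprof is tried first, then /tmp.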
+func setTmpDir(ui plugin.UI) error { + if profileDir := os.Getenv("PPROF_TMPDIR"); profileDir != "" { + return nil + } + for _, tmpDir := range []string{os.Getenv("HOME") + "/pprof", "/tmp"} { + if err := os.MkdirAll(tmpDir, 0755); err != nil { + ui.PrintErr("Could not use temp dir ", tmpDir, ": ", err.Error()) + continue + } + os.Setenv("PPROF_TMPDIR", tmpDir) + return nil + } + return fmt.Errorf("failed to identify temp dir") +} + +// grabProfile fetches a profile. Returns the profile, sources for the +// profile mappings, a bool indicating if the profile was fetched +// remotely, and an error. +func grabProfile(s *source, source string, scale float64, fetcher plugin.Fetcher, obj plugin.ObjTool, ui plugin.UI) (p *profile.Profile, msrc plugin.MappingSources, remote bool, err error) { + var src string + duration, timeout := time.Duration(s.Seconds)*time.Second, time.Duration(s.Timeout)*time.Second + if fetcher != nil { + p, src, err = fetcher.Fetch(source, duration, timeout) + if err != nil { + return + } + } + if err != nil || p == nil { + // Fetch the profile over HTTP or from a file. + p, src, err = fetch(source, duration, timeout, ui) + if err != nil { + return + } + } + + if err = p.CheckValid(); err != nil { + return + } + + // Apply local changes to the profile. + p.Scale(scale) + + // Update the binary locations from command line and paths. + locateBinaries(p, s, obj, ui) + + // Collect the source URL for all mappings. + if src != "" { + msrc = collectMappingSources(p, src) + remote = true + } + return +} + +// collectMappingSources saves the mapping sources of a profile. +func collectMappingSources(p *profile.Profile, source string) plugin.MappingSources { + ms := plugin.MappingSources{} + for _, m := range p.Mapping { + src := struct { + Source string + Start uint64 + }{ + source, m.Start, + } + if key := m.BuildID; key != "" { + ms[key] = append(ms[key], src) + } + if key := m.File; key != "" { + ms[key] = append(ms[key], src) + } + } + return ms +} + +// locateBinaries searches for binary files listed in the profile and, if found, +// updates the profile accordingly. +func locateBinaries(p *profile.Profile, s *source, obj plugin.ObjTool, ui plugin.UI) { + // Construct search path to examine + searchPath := os.Getenv("PPROF_BINARY_PATH") + if searchPath == "" { + // Use $HOME/pprof/binaries as default directory for local symbolization binaries + searchPath = filepath.Join(os.Getenv("HOME"), "pprof", "binaries") + } + +mapping: + for i, m := range p.Mapping { + var baseName string + // Replace executable filename/buildID with the overrides from source. + // Assumes the executable is the first Mapping entry. + if i == 0 { + if s.ExecName != "" { + m.File = s.ExecName + } + if s.BuildID != "" { + m.BuildID = s.BuildID + } + } + if m.File != "" { + baseName = filepath.Base(m.File) + } + + for _, path := range filepath.SplitList(searchPath) { + var fileNames []string + if m.BuildID != "" { + fileNames = []string{filepath.Join(path, m.BuildID, baseName)} + if matches, err := filepath.Glob(filepath.Join(path, m.BuildID, "*")); err == nil { + fileNames = append(fileNames, matches...) 
+ } + } + if baseName != "" { + fileNames = append(fileNames, filepath.Join(path, baseName)) + } + for _, name := range fileNames { + if f, err := obj.Open(name, m.Start, m.Limit, m.Offset); err == nil { + defer f.Close() + fileBuildID := f.BuildID() + if m.BuildID != "" && m.BuildID != fileBuildID { + ui.PrintErr("Ignoring local file " + name + ": build-id mismatch (" + m.BuildID + " != " + fileBuildID + ")") + } else { + m.File = name + continue mapping + } + } + } + } + } +} + +// fetch fetches a profile from source, within the timeout specified, +// producing messages through the ui. It returns the profile and the +// url of the actual source of the profile for remote profiles. +func fetch(source string, duration, timeout time.Duration, ui plugin.UI) (p *profile.Profile, src string, err error) { + var f io.ReadCloser + + if sourceURL, timeout := adjustURL(source, duration, timeout); sourceURL != "" { + ui.Print("Fetching profile over HTTP from " + sourceURL) + if duration > 0 { + ui.Print(fmt.Sprintf("Please wait... (%v)", duration)) + } + f, err = fetchURL(sourceURL, timeout) + src = sourceURL + } else { + f, err = os.Open(source) + } + if err == nil { + defer f.Close() + p, err = profile.Parse(f) + } + return +} + +// fetchURL fetches a profile from a URL using HTTP. +func fetchURL(source string, timeout time.Duration) (io.ReadCloser, error) { + resp, err := httpGet(source, timeout) + if err != nil { + return nil, fmt.Errorf("http fetch %s: %v", source, err) + } + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("server response: %s", resp.Status) + } + + return resp.Body, nil +} + +// adjustURL validates if a profile source is a URL and returns an +// cleaned up URL and the timeout to use for retrieval over HTTP. +// If the source cannot be recognized as a URL it returns an empty string. +func adjustURL(source string, duration, timeout time.Duration) (string, time.Duration) { + u, err := url.Parse(source) + if err != nil || (u.Host == "" && u.Scheme != "" && u.Scheme != "file") { + // Try adding http:// to catch sources of the form hostname:port/path. + // url.Parse treats "hostname" as the scheme. + u, err = url.Parse("http://" + source) + } + if err != nil || u.Host == "" { + return "", 0 + } + + // Apply duration/timeout overrides to URL. + values := u.Query() + if duration > 0 { + values.Set("seconds", fmt.Sprint(int(duration.Seconds()))) + } else { + if urlSeconds := values.Get("seconds"); urlSeconds != "" { + if us, err := strconv.ParseInt(urlSeconds, 10, 32); err == nil { + duration = time.Duration(us) * time.Second + } + } + } + if timeout <= 0 { + if duration > 0 { + timeout = duration + duration/2 + } else { + timeout = 60 * time.Second + } + } + u.RawQuery = values.Encode() + return u.String(), timeout +} + +// httpGet is a wrapper around http.Get; it is defined as a variable +// so it can be redefined during for testing. +var httpGet = func(url string, timeout time.Duration) (*http.Response, error) { + client := &http.Client{ + Transport: &http.Transport{ + ResponseHeaderTimeout: timeout + 5*time.Second, + }, + } + return client.Get(url) +} diff --git a/src/internal/driver/fetch_test.go b/src/internal/driver/fetch_test.go new file mode 100644 index 00000000..62be8b66 --- /dev/null +++ b/src/internal/driver/fetch_test.go @@ -0,0 +1,148 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package driver + +import ( + "fmt" + "io/ioutil" + "net/http" + "net/url" + "os" + "path/filepath" + "regexp" + "testing" + "time" + + "internal/plugin" + "internal/proftest" + "profile" +) + +func TestSymbolizationPath(t *testing.T) { + // Save environment variables to restore after test + saveHome := os.Getenv("HOME") + savePath := os.Getenv("PPROF_BINARY_PATH") + + tempdir, err := ioutil.TempDir("", "home") + if err != nil { + t.Fatal("creating temp dir: ", err) + } + defer os.RemoveAll(tempdir) + os.MkdirAll(filepath.Join(tempdir, "pprof", "binaries", "abcde10001"), 0700) + os.Create(filepath.Join(tempdir, "pprof", "binaries", "abcde10001", "binary")) + + obj := testObj{tempdir} + os.Setenv("HOME", tempdir) + for _, tc := range []struct { + env, file, buildID, want string + msgCount int + }{ + {"", "/usr/bin/binary", "", "/usr/bin/binary", 0}, + {"", "/usr/bin/binary", "fedcb10000", "/usr/bin/binary", 0}, + {"", "/prod/path/binary", "abcde10001", filepath.Join(tempdir, "pprof/binaries/abcde10001/binary"), 0}, + {"/alternate/architecture", "/usr/bin/binary", "", "/alternate/architecture/binary", 0}, + {"/alternate/architecture", "/usr/bin/binary", "abcde10001", "/alternate/architecture/binary", 0}, + {"/nowhere:/alternate/architecture", "/usr/bin/binary", "fedcb10000", "/usr/bin/binary", 1}, + {"/nowhere:/alternate/architecture", "/usr/bin/binary", "abcde10002", "/usr/bin/binary", 1}, + } { + os.Setenv("PPROF_BINARY_PATH", tc.env) + p := &profile.Profile{ + Mapping: []*profile.Mapping{ + { + File: tc.file, + BuildID: tc.buildID, + }, + }, + } + s := &source{} + locateBinaries(p, s, obj, &proftest.TestUI{t, tc.msgCount}) + if file := p.Mapping[0].File; file != tc.want { + t.Errorf("%s:%s:%s, want %s, got %s", tc.env, tc.file, tc.buildID, tc.want, file) + } + } + os.Setenv("HOME", saveHome) + os.Setenv("PPROF_BINARY_PATH", savePath) +} + +type testObj struct { + home string +} + +func (o testObj) Open(file string, start, limit, offset uint64) (plugin.ObjFile, error) { + switch file { + case "/alternate/architecture/binary": + return testFile{file, "abcde10001"}, nil + case "/usr/bin/binary": + return testFile{file, "fedcb10000"}, nil + case filepath.Join(o.home, "pprof/binaries/abcde10001/binary"): + return testFile{file, "abcde10001"}, nil + } + return nil, fmt.Errorf("not found: %s", file) +} +func (testObj) Demangler(_ string) func(names []string) (map[string]string, error) { + return func(names []string) (map[string]string, error) { return nil, nil } +} +func (testObj) Disasm(file string, start, end uint64) ([]plugin.Inst, error) { return nil, nil } + +type testFile struct{ name, buildID string } + +func (f testFile) Name() string { return f.name } +func (testFile) Base() uint64 { return 0 } +func (f testFile) BuildID() string { return f.buildID } +func (testFile) SourceLine(addr uint64) ([]plugin.Frame, error) { return nil, nil } +func (testFile) Symbols(r *regexp.Regexp, addr uint64) ([]*plugin.Sym, error) { return nil, nil } +func (testFile) Close() error { return nil } + +func TestFetch(t *testing.T) { + const path = "testdata/" + + // Intercept http.Get calls 
from HTTPFetcher. + httpGet = stubHTTPGet + + for _, source := range [][2]string{ + {path + "go.crc32.cpu", "go.crc32.cpu"}, + {"http://localhost/profile?file=cppbench.cpu", "cppbench.cpu"}, + } { + p, _, err := fetch(source[0], 0, 10*time.Second, &proftest.TestUI{t, 0}) + if err != nil { + t.Fatalf("%s: %s", source[0], err) + } + if len(p.Sample) == 0 { + t.Errorf("want non-zero samples") + } + } +} + +// stubHTTPGet intercepts a call to http.Get and rewrites it to use +// "file://" to get the profile directly from a file. +func stubHTTPGet(source string, _ time.Duration) (*http.Response, error) { + url, err := url.Parse(source) + if err != nil { + return nil, err + } + + values := url.Query() + file := values.Get("file") + + if file == "" { + return nil, fmt.Errorf("want .../file?profile, got %s", source) + } + + t := &http.Transport{} + t.RegisterProtocol("file", http.NewFileTransport(http.Dir("testdata/"))) + + c := &http.Client{Transport: t} + return c.Get("file:///" + file) +} diff --git a/src/internal/driver/interactive.go b/src/internal/driver/interactive.go new file mode 100644 index 00000000..28ae3b04 --- /dev/null +++ b/src/internal/driver/interactive.go @@ -0,0 +1,372 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package driver + +import ( + "fmt" + "io" + "sort" + "strconv" + "strings" + + "internal/plugin" + "internal/report" + "profile" +) + +// interactive starts a shell to read pprof commands. +func interactive(p *profile.Profile, o *plugin.Options) error { + // Enter command processing loop. + o.UI.SetAutoComplete(newCompleter(functionNames(p))) + pprofVariables.set("compact_labels", "true") + + // Do not wait for the visualizer to complete, to allow multiple + // graphs to be visualized simultaneously. + waitForVisualizer = false + shortcuts := profileShortcuts(p) + + greetings(p, o.UI) + for { + input, err := o.UI.ReadLine(pprofPrompt(p)) + if err != nil { + if err != io.EOF { + return err + } + if input == "" { + return nil + } + } + + for _, input := range shortcuts.expand(input) { + // Process assignments of the form variable=value + if s := strings.SplitN(input, "=", 2); len(s) > 0 { + name := strings.TrimSpace(s[0]) + + if v := pprofVariables[name]; v != nil { + var value string + if len(s) == 2 { + value = strings.TrimSpace(s[1]) + } + if err := pprofVariables.set(name, value); err != nil { + o.UI.PrintErr(err) + } + continue + } + } + + tokens := strings.Fields(input) + if len(tokens) == 0 { + continue + } + + switch tokens[0] { + case "exit", "quit": + return nil + case "help": + commandHelp(strings.Join(tokens[1:], " "), o.UI) + continue + } + + args, vars, err := parseCommandLine(tokens) + if err == nil { + err = generateReportWrapper(p, args, vars, o) + } + + if err != nil { + o.UI.PrintErr(err) + } + } + } +} + +var generateReportWrapper = generateReport // For testing purposes. 
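+ +// An illustrative interactive session (commands only, output elided) might be: +// +//   PPROF> focus=main.* +//   PPROF> top 10 --cum +//   PPROF> :   (shortcut that clears focus/ignore/hide/tagfocus/tagignore) +//   PPROF> quit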
+ +// greetings prints a brief welcome and some overall profile +// information before accepting interactive commands. +func greetings(p *profile.Profile, ui plugin.UI) { + ropt, err := reportOptions(p, pprofVariables) + if err == nil { + ui.Print(strings.Join(report.ProfileLabels(report.New(p, ropt)), "\n")) + } + ui.Print("Entering interactive mode (type \"help\" for commands)") +} + +// shortcuts represents composite commands that expand into a sequence +// of other commands. +type shortcuts map[string][]string + +func (a shortcuts) expand(input string) []string { + if a != nil { + if r, ok := a[input]; ok { + return r + } + } + return []string{input} +} + +var pprofShortcuts = shortcuts{ + ":": []string{"focus=", "ignore=", "hide=", "tagfocus=", "tagignore="}, +} + +// profileShortcuts creates macros for convenience and backward compatibility. +func profileShortcuts(p *profile.Profile) shortcuts { + s := pprofShortcuts + // Add shortcuts for sample types + for _, st := range p.SampleType { + command := fmt.Sprintf("sample_index=%s", st.Type) + s[st.Type] = []string{command} + s["total_"+st.Type] = []string{"mean=0", command} + s["mean_"+st.Type] = []string{"mean=1", command} + } + return s +} + +// pprofPrompt returns the prompt displayed to accept commands. +// hides some default values to reduce clutter. +func pprofPrompt(p *profile.Profile) string { + var args []string + for n, o := range pprofVariables { + v := o.stringValue() + if v == "" { + continue + } + // Do not show some default values. + switch { + case n == "unit" && v == "minimum": + continue + case n == "divide_by" && v == "1": + continue + case n == "nodecount" && v == "-1": + continue + case n == "sample_index": + index, err := locateSampleIndex(p, v) + if err != nil { + v = "ERROR: " + err.Error() + } else { + v = fmt.Sprintf("%s (%d)", p.SampleType[index].Type, index) + } + case n == "trim" || n == "compact_labels": + if o.boolValue() == true { + continue + } + case o.kind == boolKind: + if o.boolValue() == false { + continue + } + } + args = append(args, fmt.Sprintf(" %-25s : %s", n, v)) + } + sort.Strings(args) + return "Options:\n" + strings.Join(args, "\n") + "\nPPROF>" +} + +// parseCommandLine parses a command and returns the pprof command to +// execute and a set of variables for the report. +func parseCommandLine(input []string) ([]string, variables, error) { + cmd, args := input[:1], input[1:] + name := cmd[0] + + c := pprofCommands[cmd[0]] + if c == nil { + return nil, nil, fmt.Errorf("Unrecognized command: %q", name) + } + + if c.hasParam { + if len(args) == 0 { + return nil, nil, fmt.Errorf("command %s requires an argument", name) + } + cmd = append(cmd, args[0]) + args = args[1:] + } + + // Copy the variables as options set in the command line are not persistent. 
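+ // For example (illustrative), "top 10 --cum focus1 -skip >out" produces + // cmd=["top"] with nodecount=10, cum=true, focus="focus1", ignore="skip" + // and output="out"; TestInteractiveCommands pins down the exact expectations.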
+ vcopy := pprofVariables.makeCopy() + + var focus, ignore string + for i := 0; i < len(args); i++ { + t := args[i] + if _, err := strconv.ParseInt(t, 10, 32); err == nil { + vcopy.set("nodecount", t) + continue + } + switch t[0] { + case '>': + outputFile := t[1:] + if outputFile == "" { + i++ + if i >= len(args) { + return nil, nil, fmt.Errorf("Unexpected end of line after >") + } + outputFile = args[i] + } + vcopy.set("output", outputFile) + case '-': + if t == "--cum" || t == "-cum" { + vcopy.set("cum", "t") + continue + } + ignore = catRegex(ignore, t[1:]) + default: + focus = catRegex(focus, t) + } + } + + if name == "tags" { + updateFocusIgnore(vcopy, "tag", focus, ignore) + } else { + updateFocusIgnore(vcopy, "", focus, ignore) + } + + if vcopy["nodecount"].intValue() == -1 && (name == "text" || name == "top") { + vcopy.set("nodecount", "10") + } + + return cmd, vcopy, nil +} + +func updateFocusIgnore(v variables, prefix, f, i string) { + if f != "" { + focus := prefix + "focus" + v.set(focus, catRegex(v[focus].value, f)) + } + + if i != "" { + ignore := prefix + "ignore" + v.set(ignore, catRegex(v[ignore].value, i)) + } +} + +func catRegex(a, b string) string { + if a != "" && b != "" { + return a + "|" + b + } + return a + b +} + +// commandHelp displays help and usage information for all Commands +// and Variables or a specific Command or Variable. +func commandHelp(args string, ui plugin.UI) { + if args == "" { + help := usage(false) + help = help + ` + : Clear focus/ignore/hide/tagfocus/tagignore + + type "help " for more information +` + + ui.Print(help) + return + } + + if c := pprofCommands[args]; c != nil { + ui.Print(c.help(args)) + return + } + + if v := pprofVariables[args]; v != nil { + ui.Print(v.help + "\n") + return + } + + ui.PrintErr("Unknown command: " + args) +} + +// newCompleter creates an autocompletion function for a set of commands. +func newCompleter(fns []string) func(string) string { + return func(line string) string { + v := pprofVariables + switch tokens := strings.Fields(line); len(tokens) { + case 0: + // Nothing to complete + case 1: + // Single token -- complete command name + if match := matchVariableOrCommand(v, tokens[0]); match != "" { + return match + } + case 2: + if tokens[0] == "help" { + if match := matchVariableOrCommand(v, tokens[1]); match != "" { + return tokens[0] + " " + match + } + return line + } + fallthrough + default: + // Multiple tokens -- complete using functions, except for tags + if cmd := pprofCommands[tokens[0]]; cmd != nil && tokens[0] != "tags" { + lastTokenIdx := len(tokens) - 1 + lastToken := tokens[lastTokenIdx] + if strings.HasPrefix(lastToken, "-") { + lastToken = "-" + functionCompleter(lastToken[1:], fns) + } else { + lastToken = functionCompleter(lastToken, fns) + } + return strings.Join(append(tokens[:lastTokenIdx], lastToken), " ") + } + } + return line + } +} + +// matchCommand attempts to match a string token to the prefix of a Command. +func matchVariableOrCommand(v variables, token string) string { + token = strings.ToLower(token) + found := "" + for cmd := range pprofCommands { + if strings.HasPrefix(cmd, token) { + if found != "" { + return "" + } + found = cmd + } + } + for variable := range v { + if strings.HasPrefix(variable, token) { + if found != "" { + return "" + } + found = variable + } + } + return found +} + +// functionCompleter replaces provided substring with a function +// name retrieved from a profile if a single match exists. Otherwise, +// it returns unchanged substring. 
It defaults to no-op if the profile +// is not specified. +func functionCompleter(substring string, fns []string) string { + found := "" + for _, fName := range fns { + if strings.Contains(fName, substring) { + if found != "" { + return substring + } + found = fName + } + } + if found != "" { + return found + } + return substring +} + +func functionNames(p *profile.Profile) []string { + var fns []string + for _, fn := range p.Function { + fns = append(fns, fn.Name) + } + return fns +} diff --git a/src/internal/driver/interactive_test.go b/src/internal/driver/interactive_test.go new file mode 100644 index 00000000..b12fb254 --- /dev/null +++ b/src/internal/driver/interactive_test.go @@ -0,0 +1,305 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package driver + +import ( + "fmt" + "io" + "math/rand" + "strings" + "testing" + + "internal/plugin" + "profile" + "internal/report" +) + +func TestShell(t *testing.T) { + p := &profile.Profile{} + generateReportWrapper = checkValue + defer func() { generateReportWrapper = generateReport }() + + // Use test commands and variables to exercise interactive processing + var savedCommands commands + savedCommands, pprofCommands = pprofCommands, testCommands + defer func() { pprofCommands = savedCommands }() + + savedVariables := pprofVariables + defer func() { pprofVariables = savedVariables }() + + // Random interleave of independent scripts + pprofVariables = testVariables(savedVariables) + o := setDefaults(nil) + o.UI = newUI(t, interleave(script, 0)) + if err := interactive(p, o); err != nil { + t.Error("first attempt:", err) + } + // Random interleave of independent scripts + pprofVariables = testVariables(savedVariables) + o.UI = newUI(t, interleave(script, 1)) + if err := interactive(p, o); err != nil { + t.Error("second attempt:", err) + } + + // Random interleave of independent scripts with shortcuts + pprofVariables = testVariables(savedVariables) + var scScript []string + pprofShortcuts, scScript = makeShortcuts(interleave(script, 2), 1) + o.UI = newUI(t, scScript) + if err := interactive(p, o); err != nil { + t.Error("first shortcut attempt:", err) + } + + // Random interleave of independent scripts with shortcuts + pprofVariables = testVariables(savedVariables) + pprofShortcuts, scScript = makeShortcuts(interleave(script, 1), 2) + o.UI = newUI(t, scScript) + if err := interactive(p, o); err != nil { + t.Error("second shortcut attempt:", err) + } + + // Verify propagation of IO errors + pprofVariables = testVariables(savedVariables) + o.UI = newUI(t, []string{"**error**"}) + if err := interactive(p, o); err == nil { + t.Error("expected IO error, got nil") + } + +} + +var testCommands = commands{ + "check": &command{report.Raw, nil, true, "", ""}, +} + +func testVariables(base variables) variables { + v := base.makeCopy() + + v["b"] = &variable{boolKind, "f", "", ""} + v["bb"] = &variable{boolKind, "f", "", ""} + v["i"] = &variable{intKind, "0", "", ""} + v["ii"] = 
&variable{intKind, "0", "", ""} + v["f"] = &variable{floatKind, "0", "", ""} + v["ff"] = &variable{floatKind, "0", "", ""} + v["s"] = &variable{stringKind, "", "", ""} + v["ss"] = &variable{stringKind, "", "", ""} + + v["ta"] = &variable{boolKind, "f", "radio", ""} + v["tb"] = &variable{boolKind, "f", "radio", ""} + v["tc"] = &variable{boolKind, "t", "radio", ""} + + return v +} + +// script contains sequences of commands to be executed for testing. Commands +// are split by semicolon and interleaved randomly, so they must be +// independent from each other. +var script = []string{ + "bb=true;bb=false;check bb=false;bb=yes;check bb=true", + "b=1;check b=true;b=n;check b=false", + "i=-1;i=-2;check i=-2;i=999999;check i=999999", + "check ii=0;ii=-1;check ii=-1;ii=100;check ii=100", + "f=-1;f=-2.5;check f=-2.5;f=0.0001;check f=0.0001", + "check ff=0;ff=-1.01;check ff=-1.01;ff=100;check ff=100", + "s=one;s=two;check s=two", + "ss=tree;check ss=tree;ss=;check ss;ss=forest;check ss=forest", + "ta=true;check ta=true;check tb=false;check tc=false;tb=1;check tb=true;check ta=false;check tc=false;tc=yes;check tb=false;check ta=false;check tc=true", +} + +func makeShortcuts(input []string, seed int) (shortcuts, []string) { + rand.Seed(int64(seed)) + + s := shortcuts{} + var output, chunk []string + for _, l := range input { + chunk = append(chunk, l) + switch rand.Intn(3) { + case 0: + // Create a macro for commands in 'chunk'. + macro := fmt.Sprintf("alias%d", len(s)) + s[macro] = chunk + output = append(output, macro) + chunk = nil + case 1: + // Append commands in 'chunk' by themselves. + output = append(output, chunk...) + chunk = nil + case 2: + // Accumulate commands into 'chunk' + } + } + output = append(output, chunk...) + return s, output +} + +func newUI(t *testing.T, input []string) plugin.UI { + return &testUI{ + t: t, + input: input, + } +} + +type testUI struct { + t *testing.T + input []string + index int +} + +func (ui *testUI) ReadLine(_ string) (string, error) { + if ui.index >= len(ui.input) { + return "", io.EOF + } + input := ui.input[ui.index] + if input == "**error**" { + return "", fmt.Errorf("Error: %s", input) + } + ui.index++ + return input, nil +} + +func (ui *testUI) Print(args ...interface{}) { +} + +func (ui *testUI) PrintErr(args ...interface{}) { + output := fmt.Sprint(args) + if output != "" { + ui.t.Error(output) + } +} + +func (ui *testUI) IsTerminal() bool { + return false +} + +func (ui *testUI) SetAutoComplete(func(string) string) { +} + +func checkValue(p *profile.Profile, cmd []string, vars variables, o *plugin.Options) error { + if len(cmd) != 2 { + return fmt.Errorf("expected len(cmd)==2, got %v", cmd) + } + + input := cmd[1] + args := strings.SplitN(input, "=", 2) + if len(args) == 0 { + return fmt.Errorf("unexpected empty input") + } + name, value := args[0], "" + if len(args) == 2 { + value = args[1] + } + + gotv := vars[name] + if gotv == nil { + return fmt.Errorf("Could not find variable named %s", name) + } + + if got := gotv.stringValue(); got != value { + return fmt.Errorf("Variable %s, want %s, got %s", name, value, got) + } + return nil +} + +func interleave(input []string, seed int) []string { + var inputs [][]string + for _, s := range input { + inputs = append(inputs, strings.Split(s, ";")) + } + rand.Seed(int64(seed)) + var output []string + for len(inputs) > 0 { + next := rand.Intn(len(inputs)) + output = append(output, inputs[next][0]) + if tail := inputs[next][1:]; len(tail) > 0 { + inputs[next] = tail + } else { + inputs = 
append(inputs[:next], inputs[next+1:]...) + } + } + return output +} + +func TestInteractiveCommands(t *testing.T) { + type interactiveTestcase struct { + input string + want map[string]string + } + + testcases := []interactiveTestcase{ + { + "top 10 --cum focus1 -ignore focus2", + map[string]string{ + "functions": "true", + "nodecount": "10", + "cum": "true", + "focus": "focus1|focus2", + "ignore": "ignore", + }, + }, + { + "dot", + map[string]string{ + "functions": "true", + "nodecount": "80", + "cum": "false", + }, + }, + { + "tags -ignore1 -ignore2 focus1 >out", + map[string]string{ + "functions": "true", + "nodecount": "80", + "cum": "false", + "output": "out", + "tagfocus": "focus1", + "tagignore": "ignore1|ignore2", + }, + }, + {"weblist find -test", + map[string]string{ + "functions": "false", + "addressnoinlines": "true", + "nodecount": "0", + "cum": "false", + "flat": "true", + "ignore": "test", + }, + }, + { + "callgrind fun -ignore >out", + map[string]string{ + "functions": "false", + "lines": "true", + "nodecount": "0", + "cum": "false", + "flat": "true", + "output": "out", + }, + }, + } + + for _, tc := range testcases { + cmd, vars, err := parseCommandLine(strings.Fields(tc.input)) + vars = applyCommandOverrides(cmd, vars) + if err != nil { + t.Errorf("failed on %q: %v", tc.input, err) + } + + for n, want := range tc.want { + if got := vars[n].stringValue(); got != want { + t.Errorf("failed on %q, cmd=%q, %s got %s, want %s", tc.input, cmd, n, got, want) + } + } + } +} diff --git a/src/internal/driver/options.go b/src/internal/driver/options.go new file mode 100644 index 00000000..e225c2a9 --- /dev/null +++ b/src/internal/driver/options.go @@ -0,0 +1,148 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package driver + +import ( + "bufio" + "flag" + "fmt" + "io" + "os" + "strings" + + "internal/binutils" + "internal/plugin" + "internal/symbolizer" +) + +// setDefaults returns a new plugin.Options with zero fields sets to +// sensible defaults. +func setDefaults(o *plugin.Options) *plugin.Options { + d := &plugin.Options{} + if o != nil { + *d = *o + } + if d.Writer == nil { + d.Writer = oswriter{} + } + if d.Flagset == nil { + d.Flagset = goFlags{} + } + if d.Obj == nil { + d.Obj = &binutils.Binutils{} + } + if d.UI == nil { + d.UI = &stdUI{r: bufio.NewReader(os.Stdin)} + } + if d.Sym == nil { + d.Sym = &symbolizer.Symbolizer{d.Obj, d.UI} + } + return d +} + +// goFlags returns a flagset implementation based on the standard flag +// package from the Go distribution. It implements the plugin.FlagSet +// interface. 
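+// Any other plugin.FlagSet implementation can be substituted through +// plugin.Options; the tests use the in-memory testFlags type from +// driver_test.go instead of real command-line flags.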
+type goFlags struct{} + +func (goFlags) Bool(o string, d bool, c string) *bool { + return flag.Bool(o, d, c) +} + +func (goFlags) Int(o string, d int, c string) *int { + return flag.Int(o, d, c) +} + +func (goFlags) Float64(o string, d float64, c string) *float64 { + return flag.Float64(o, d, c) +} + +func (goFlags) String(o, d, c string) *string { + return flag.String(o, d, c) +} + +func (goFlags) BoolVar(b *bool, o string, d bool, c string) { + flag.BoolVar(b, o, d, c) +} + +func (goFlags) IntVar(i *int, o string, d int, c string) { + flag.IntVar(i, o, d, c) +} + +func (goFlags) Float64Var(f *float64, o string, d float64, c string) { + flag.Float64Var(f, o, d, c) +} + +func (goFlags) StringVar(s *string, o, d, c string) { + flag.StringVar(s, o, d, c) +} + +func (goFlags) StringList(o, d, c string) *[]*string { + return &[]*string{flag.String(o, d, c)} +} + +func (goFlags) ExtraUsage() string { + return "" +} + +func (goFlags) Parse(usage func()) []string { + flag.Usage = usage + flag.Parse() + args := flag.Args() + if len(args) == 0 { + usage() + } + return args +} + +type stdUI struct { + r *bufio.Reader +} + +func (ui *stdUI) ReadLine(prompt string) (string, error) { + os.Stdout.WriteString(prompt) + return ui.r.ReadString('\n') +} + +func (ui *stdUI) Print(args ...interface{}) { + ui.fprint(os.Stderr, args) +} + +func (ui *stdUI) PrintErr(args ...interface{}) { + ui.fprint(os.Stderr, args) +} + +func (ui *stdUI) IsTerminal() bool { + return false +} + +func (ui *stdUI) SetAutoComplete(func(string) string) { +} + +func (ui *stdUI) fprint(f *os.File, args []interface{}) { + text := fmt.Sprint(args...) + if !strings.HasSuffix(text, "\n") { + text += "\n" + } + f.WriteString(text) +} + +// oswriter implements the Writer interface using a regular file. +type oswriter struct{} + +func (oswriter) Open(name string) (io.WriteCloser, error) { + f, err := os.Create(name) + return f, err +} diff --git a/src/internal/driver/tempfile.go b/src/internal/driver/tempfile.go new file mode 100644 index 00000000..43bc9c22 --- /dev/null +++ b/src/internal/driver/tempfile.go @@ -0,0 +1,54 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package driver + +import ( + "fmt" + "os" + "path/filepath" + "sync" +) + +// newTempFile returns an unused filename for output files. 
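+// The file is created and returned open. As an illustrative example (the
+// paths are hypothetical), newTempFile("/tmp", "pprof", ".svg") would yield
+// /tmp/pprof001.svg, or /tmp/pprof002.svg if the former already exists.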
+func newTempFile(dir, prefix, suffix string) (*os.File, error) { + for index := 1; index < 10000; index++ { + path := filepath.Join(dir, fmt.Sprintf("%s%03d%s", prefix, index, suffix)) + if _, err := os.Stat(path); err != nil { + return os.Create(path) + } + } + // Give up + return nil, fmt.Errorf("could not create file of the form %s%03d%s", prefix, 1, suffix) +} + +var tempFiles []string +var tempFilesMu = sync.Mutex{} + +// deferDeleteTempFile marks a file to be deleted by next call to Cleanup() +func deferDeleteTempFile(path string) { + tempFilesMu.Lock() + tempFiles = append(tempFiles, path) + tempFilesMu.Unlock() +} + +// cleanupTempFiles removes any temporary files selected for deferred cleaning. +func cleanupTempFiles() { + tempFilesMu.Lock() + for _, f := range tempFiles { + os.Remove(f) + } + tempFiles = nil + tempFilesMu.Unlock() +} diff --git a/src/internal/driver/testdata/cppbench.cpu b/src/internal/driver/testdata/cppbench.cpu new file mode 100644 index 00000000..95c22e1e Binary files /dev/null and b/src/internal/driver/testdata/cppbench.cpu differ diff --git a/src/internal/driver/testdata/cppbench.svg b/src/internal/driver/testdata/cppbench.svg new file mode 100644 index 00000000..f397c5e0 --- /dev/null +++ b/src/internal/driver/testdata/cppbench.svg @@ -0,0 +1,55 @@ + + + + + + +cppbench_server_main + +cluster_L + + + +File: cppbench_server_main + +File: cppbench_server_main +Type: cpu +0 of 7120000000ns total (0%) +Dropped 56 nodes (cum <= 35600000ns) +Showing top 2 nodes out of 38 (cum >= 7070000000ns) + + +N1 + + +start_thread +0 of 7120000000ns(100%) + + + + +N2 + + +RunWorkerLoop +0 of 7070000000ns(99.30%) + + + + +N1->N2 + + + + + + + 7070000000ns + + + + + diff --git a/src/internal/driver/testdata/file1000.src b/src/internal/driver/testdata/file1000.src new file mode 100644 index 00000000..b53eeca5 --- /dev/null +++ b/src/internal/driver/testdata/file1000.src @@ -0,0 +1,17 @@ +line1 +line2 +line3 +line4 +line5 +line6 +line7 +line8 +line9 +line0 +line1 +line2 +line3 +line4 +line5 + + diff --git a/src/internal/driver/testdata/file2000.src b/src/internal/driver/testdata/file2000.src new file mode 100644 index 00000000..b53eeca5 --- /dev/null +++ b/src/internal/driver/testdata/file2000.src @@ -0,0 +1,17 @@ +line1 +line2 +line3 +line4 +line5 +line6 +line7 +line8 +line9 +line0 +line1 +line2 +line3 +line4 +line5 + + diff --git a/src/internal/driver/testdata/file3000.src b/src/internal/driver/testdata/file3000.src new file mode 100644 index 00000000..b53eeca5 --- /dev/null +++ b/src/internal/driver/testdata/file3000.src @@ -0,0 +1,17 @@ +line1 +line2 +line3 +line4 +line5 +line6 +line7 +line8 +line9 +line0 +line1 +line2 +line3 +line4 +line5 + + diff --git a/src/internal/driver/testdata/go.crc32.cpu b/src/internal/driver/testdata/go.crc32.cpu new file mode 100644 index 00000000..ce08313d Binary files /dev/null and b/src/internal/driver/testdata/go.crc32.cpu differ diff --git a/src/internal/driver/testdata/pprof.contention.cum.files.dot b/src/internal/driver/testdata/pprof.contention.cum.files.dot new file mode 100644 index 00000000..29a34ab5 --- /dev/null +++ b/src/internal/driver/testdata/pprof.contention.cum.files.dot @@ -0,0 +1,10 @@ +digraph "." 
{ +node [style=filled fillcolor="#f8f8f8"] +subgraph cluster_L { "Build ID: buildid-contention" [shape=box fontsize=16 label="Build ID: buildid-contention\lComment #1\lComment #2\lType: delay\lShowing nodes accounting for 149.50ms, 100% of 149.50ms total\l"] } +N1 [label="file3000.src\n32.77ms (21.92%)\nof 149.50ms (100%)" fontsize=20 shape=box tooltip="testdata/file3000.src (149.50ms)" color="#b20000" fillcolor="#edd5d5"] +N2 [label="file1000.src\n51.20ms (34.25%)" fontsize=23 shape=box tooltip="testdata/file1000.src (51.20ms)" color="#b23100" fillcolor="#eddbd5"] +N3 [label="file2000.src\n65.54ms (43.84%)\nof 75.78ms (50.68%)" fontsize=24 shape=box tooltip="testdata/file2000.src (75.78ms)" color="#b22000" fillcolor="#edd9d5"] +N1 -> N3 [label=" 75.78ms" weight=51 penwidth=3 color="#b22000" tooltip="testdata/file3000.src -> testdata/file2000.src (75.78ms)" labeltooltip="testdata/file3000.src -> testdata/file2000.src (75.78ms)"] +N1 -> N2 [label=" 40.96ms" weight=28 penwidth=2 color="#b23900" tooltip="testdata/file3000.src -> testdata/file1000.src (40.96ms)" labeltooltip="testdata/file3000.src -> testdata/file1000.src (40.96ms)"] +N3 -> N2 [label=" 10.24ms" weight=7 color="#b29775" tooltip="testdata/file2000.src -> testdata/file1000.src (10.24ms)" labeltooltip="testdata/file2000.src -> testdata/file1000.src (10.24ms)"] +} diff --git a/src/internal/driver/testdata/pprof.contention.flat.addresses.dot.focus.ignore b/src/internal/driver/testdata/pprof.contention.flat.addresses.dot.focus.ignore new file mode 100644 index 00000000..970b80ac --- /dev/null +++ b/src/internal/driver/testdata/pprof.contention.flat.addresses.dot.focus.ignore @@ -0,0 +1,9 @@ +digraph "." { +node [style=filled fillcolor="#f8f8f8"] +subgraph cluster_L { "Build ID: buildid-contention" [shape=box fontsize=16 label="Build ID: buildid-contention\lComment #1\lComment #2\lType: delay\lShowing nodes accounting for 40.96ms, 27.40% of 149.50ms total\l"] } +N1 [label="0000000000001000\nline1000\nfile1000.src:1\n40.96ms (27.40%)" fontsize=24 shape=box tooltip="0000000000001000 line1000 testdata/file1000.src:1 (40.96ms)" color="#b23900" fillcolor="#edddd5"] +N2 [label="0000000000003001\nline3000\nfile3000.src:5\n0 of 40.96ms (27.40%)" fontsize=8 shape=box tooltip="0000000000003001 line3000 testdata/file3000.src:5 (40.96ms)" color="#b23900" fillcolor="#edddd5"] +N3 [label="0000000000003001\nline3001\nfile3000.src:3\n0 of 40.96ms (27.40%)" fontsize=8 shape=box tooltip="0000000000003001 line3001 testdata/file3000.src:3 (40.96ms)" color="#b23900" fillcolor="#edddd5"] +N2 -> N3 [label=" 40.96ms\n (inline)" weight=28 penwidth=2 color="#b23900" tooltip="0000000000003001 line3000 testdata/file3000.src:5 -> 0000000000003001 line3001 testdata/file3000.src:3 (40.96ms)" labeltooltip="0000000000003001 line3000 testdata/file3000.src:5 -> 0000000000003001 line3001 testdata/file3000.src:3 (40.96ms)"] +N3 -> N1 [label=" 40.96ms" weight=28 penwidth=2 color="#b23900" tooltip="0000000000003001 line3001 testdata/file3000.src:3 -> 0000000000001000 line1000 testdata/file1000.src:1 (40.96ms)" labeltooltip="0000000000003001 line3001 testdata/file3000.src:3 -> 0000000000001000 line1000 testdata/file1000.src:1 (40.96ms)"] +} diff --git a/src/internal/driver/testdata/pprof.cpu.callgrind b/src/internal/driver/testdata/pprof.cpu.callgrind new file mode 100644 index 00000000..804dcb99 --- /dev/null +++ b/src/internal/driver/testdata/pprof.cpu.callgrind @@ -0,0 +1,77 @@ +events: cpu(ms) +fl=(1) testdata/file1000.src +fn=(1) line1000 +1 1100 + +fl=(2) 
testdata/file2000.src +fn=(2) line2001 +9 10 +cfl=(1) +cfn=(1) +calls=0 1 +9 1000 + +fl=(3) testdata/file3000.src +fn=(3) line3002 +2 10 +cfl=(2) +cfn=(4) line2000 +calls=0 4 +2 1000 + +fl=(2) +fn=(4) +4 0 +cfl=(2) +cfn=(2) +calls=0 9 +4 1010 + +fl=(3) +fn=(5) line3000 +6 0 +cfl=(3) +cfn=(6) line3001 +calls=0 5 +6 1010 + +fl=(3) +fn=(5) +7 0 +cfl=(3) +cfn=(3) +calls=0 5 +7 10 + +fl=(3) +fn=(5) +9 0 +cfl=(3) +cfn=(6) +calls=0 8 +9 100 + +fl=(3) +fn=(6) +5 0 +cfl=(3) +cfn=(3) +calls=0 2 +5 1010 + +fl=(3) +fn=(6) +8 0 +cfl=(1) +cfn=(1) +calls=0 1 +8 100 + +fl=(3) +fn=(3) +5 0 +cfl=(2) +cfn=(4) +calls=0 4 +5 10 + diff --git a/src/internal/driver/testdata/pprof.cpu.cum.lines.text.hide b/src/internal/driver/testdata/pprof.cpu.cum.lines.text.hide new file mode 100644 index 00000000..9d172713 --- /dev/null +++ b/src/internal/driver/testdata/pprof.cpu.cum.lines.text.hide @@ -0,0 +1,5 @@ +Showing nodes accounting for 1.11s, 99.11% of 1.12s total + flat flat% sum% cum cum% + 1.10s 98.21% 98.21% 1.10s 98.21% line1000 testdata/file1000.src:1 + 0 0% 98.21% 1.01s 90.18% line2000 testdata/file2000.src:4 + 0.01s 0.89% 99.11% 1.01s 90.18% line2001 testdata/file2000.src:9 (inline) diff --git a/src/internal/driver/testdata/pprof.cpu.cum.lines.text.show b/src/internal/driver/testdata/pprof.cpu.cum.lines.text.show new file mode 100644 index 00000000..9d172713 --- /dev/null +++ b/src/internal/driver/testdata/pprof.cpu.cum.lines.text.show @@ -0,0 +1,5 @@ +Showing nodes accounting for 1.11s, 99.11% of 1.12s total + flat flat% sum% cum cum% + 1.10s 98.21% 98.21% 1.10s 98.21% line1000 testdata/file1000.src:1 + 0 0% 98.21% 1.01s 90.18% line2000 testdata/file2000.src:4 + 0.01s 0.89% 99.11% 1.01s 90.18% line2001 testdata/file2000.src:9 (inline) diff --git a/src/internal/driver/testdata/pprof.cpu.cum.lines.topproto.hide b/src/internal/driver/testdata/pprof.cpu.cum.lines.topproto.hide new file mode 100644 index 00000000..10c1089e --- /dev/null +++ b/src/internal/driver/testdata/pprof.cpu.cum.lines.topproto.hide @@ -0,0 +1,5 @@ +Showing nodes accounting for 1s, 100% of 1s total + flat flat% sum% cum cum% + 1s 100% 100% 1s 100% mangled1000 testdata/file1000.src:1 + 0 0% 100% 0 0% mangled2000 testdata/file2000.src:4 + 0 0% 100% 0 0% mangled2001 testdata/file2000.src:9 diff --git a/src/internal/driver/testdata/pprof.cpu.flat.addresses.disasm b/src/internal/driver/testdata/pprof.cpu.flat.addresses.disasm new file mode 100644 index 00000000..2e33acb1 --- /dev/null +++ b/src/internal/driver/testdata/pprof.cpu.flat.addresses.disasm @@ -0,0 +1,14 @@ +Total: 1.12s +ROUTINE ======================== line1000 + 1.10s 1.10s (flat, cum) 98.21% of Total + 1.10s 1.10s 1000: instruction one + . . 1001: instruction two + . . 1002: instruction three + . . 1003: instruction four +ROUTINE ======================== line3000 + 10ms 1.12s (flat, cum) 100% of Total + 10ms 1.01s 3000: instruction one + . 100ms 3001: instruction two + . 10ms 3002: instruction three + . . 3003: instruction four + . . 3004: instruction five diff --git a/src/internal/driver/testdata/pprof.cpu.flat.addresses.weblist b/src/internal/driver/testdata/pprof.cpu.flat.addresses.weblist new file mode 100644 index 00000000..d9d0eab3 --- /dev/null +++ b/src/internal/driver/testdata/pprof.cpu.flat.addresses.weblist @@ -0,0 +1,109 @@ + + + + +Pprof listing + + + + + +
File: testbinary
+Type: cpu
+Duration: 10s, Total samples = 1.12s (11.20%)
Total: 1.12s

line1000

testdata/file1000.src +
+  Total:       1.10s      1.10s (flat, cum) 98.21%
+      1        1.10s      1.10s line1                1.10s      1.10s     1000: instruction one                                  testdata/file1000.src:1
+                   .          .     1001: instruction two                                  
+                   .          .     1002: instruction three                                
+                   .          .     1003: instruction four                                 
+
+      2            .          . line2 
+      3            .          . line3 
+      4            .          . line4 
+      5            .          . line5 
+      6            .          . line6 
+
+

line3000

testdata/file3000.src +
+  Total:        10ms      1.12s (flat, cum)   100%
+      1            .          . line1 
+      2            .          . line2 
+      3            .          . line3 
+      4            .          . line4 
+      5            .          . line5 
+      6         10ms      1.01s line6                 10ms      1.01s     3000: instruction one                                  testdata/file3000.src:6
+
+      7            .       10ms line7                    .       10ms     3002: instruction three                                testdata/file3000.src:7
+                   .          .     3003: instruction four                                 
+                   .          .     3004: instruction five                                 
+
+      8            .          . line8 
+      9            .      100ms line9                    .      100ms     3001: instruction two                                  testdata/file3000.src:9
+
+     10            .          . line0 
+     11            .          . line1 
+     12            .          . line2 
+     13            .          . line3 
+     14            .          . line4 
+
+ + + + diff --git a/src/internal/driver/testdata/pprof.cpu.flat.functions.dot b/src/internal/driver/testdata/pprof.cpu.flat.functions.dot new file mode 100644 index 00000000..18b1abf5 --- /dev/null +++ b/src/internal/driver/testdata/pprof.cpu.flat.functions.dot @@ -0,0 +1,20 @@ +digraph "testbinary" { +node [style=filled fillcolor="#f8f8f8"] +subgraph cluster_L { "File: testbinary" [shape=box fontsize=16 label="File: testbinary\lType: cpu\lDuration: 10s, Total samples = 1.12s (11.20%)\lShowing nodes accounting for 1.12s, 100% of 1.12s total\l"] } +N1 [label="line1000\nfile1000.src\n1.10s (98.21%)" fontsize=24 shape=box tooltip="line1000 testdata/file1000.src (1.10s)" color="#b20000" fillcolor="#edd5d5"] +N1_0 [label = "key1:tag1\nkey2:tag1" fontsize=8 shape=box3d tooltip="1s"] +N1 -> N1_0 [label=" 1s" weight=100 tooltip="1s" labeltooltip="1s"] +N1_1 [label = "key1:tag2\nkey3:tag2" fontsize=8 shape=box3d tooltip="0.10s"] +N1 -> N1_1 [label=" 0.10s" weight=100 tooltip="0.10s" labeltooltip="0.10s"] +N2 [label="line3000\nfile3000.src\n0 of 1.12s (100%)" fontsize=8 shape=box tooltip="line3000 testdata/file3000.src (1.12s)" color="#b20000" fillcolor="#edd5d5"] +N3 [label="line3001\nfile3000.src\n0 of 1.11s (99.11%)" fontsize=8 shape=box tooltip="line3001 testdata/file3000.src (1.11s)" color="#b20000" fillcolor="#edd5d5"] +N4 [label="line3002\nfile3000.src\n0.01s (0.89%)\nof 1.02s (91.07%)" fontsize=10 shape=box tooltip="line3002 testdata/file3000.src (1.02s)" color="#b20400" fillcolor="#edd6d5"] +N5 [label="line2001\nfile2000.src\n0.01s (0.89%)\nof 1.01s (90.18%)" fontsize=10 shape=box tooltip="line2001 testdata/file2000.src (1.01s)" color="#b20500" fillcolor="#edd6d5"] +N6 [label="line2000\nfile2000.src\n0 of 1.01s (90.18%)" fontsize=8 shape=box tooltip="line2000 testdata/file2000.src (1.01s)" color="#b20500" fillcolor="#edd6d5"] +N2 -> N3 [label=" 1.11s\n (inline)" weight=100 penwidth=5 color="#b20000" tooltip="line3000 testdata/file3000.src -> line3001 testdata/file3000.src (1.11s)" labeltooltip="line3000 testdata/file3000.src -> line3001 testdata/file3000.src (1.11s)"] +N6 -> N5 [label=" 1.01s\n (inline)" weight=91 penwidth=5 color="#b20500" tooltip="line2000 testdata/file2000.src -> line2001 testdata/file2000.src (1.01s)" labeltooltip="line2000 testdata/file2000.src -> line2001 testdata/file2000.src (1.01s)"] +N3 -> N4 [label=" 1.01s\n (inline)" weight=91 penwidth=5 color="#b20500" tooltip="line3001 testdata/file3000.src -> line3002 testdata/file3000.src (1.01s)" labeltooltip="line3001 testdata/file3000.src -> line3002 testdata/file3000.src (1.01s)"] +N4 -> N6 [label=" 1.01s" weight=91 penwidth=5 color="#b20500" tooltip="line3002 testdata/file3000.src -> line2000 testdata/file2000.src (1.01s)" labeltooltip="line3002 testdata/file3000.src -> line2000 testdata/file2000.src (1.01s)"] +N5 -> N1 [label=" 1s" weight=90 penwidth=5 color="#b20500" tooltip="line2001 testdata/file2000.src -> line1000 testdata/file1000.src (1s)" labeltooltip="line2001 testdata/file2000.src -> line1000 testdata/file1000.src (1s)"] +N3 -> N1 [label=" 0.10s" weight=9 color="#b28b62" tooltip="line3001 testdata/file3000.src -> line1000 testdata/file1000.src (0.10s)" labeltooltip="line3001 testdata/file3000.src -> line1000 testdata/file1000.src (0.10s)"] +} diff --git a/src/internal/driver/testdata/pprof.cpu.flat.functions.text b/src/internal/driver/testdata/pprof.cpu.flat.functions.text new file mode 100644 index 00000000..0807ed23 --- /dev/null +++ b/src/internal/driver/testdata/pprof.cpu.flat.functions.text @@ -0,0 +1,8 
@@ +Showing nodes accounting for 1.12s, 100% of 1.12s total + flat flat% sum% cum cum% + 1.10s 98.21% 98.21% 1.10s 98.21% line1000 testdata/file1000.src + 0.01s 0.89% 99.11% 1.01s 90.18% line2001 testdata/file2000.src (inline) + 0.01s 0.89% 100% 1.02s 91.07% line3002 testdata/file3000.src (inline) + 0 0% 100% 1.01s 90.18% line2000 testdata/file2000.src + 0 0% 100% 1.12s 100% line3000 testdata/file3000.src + 0 0% 100% 1.11s 99.11% line3001 testdata/file3000.src (inline) diff --git a/src/internal/driver/testdata/pprof.cpu.peek b/src/internal/driver/testdata/pprof.cpu.peek new file mode 100644 index 00000000..1a4a70c4 --- /dev/null +++ b/src/internal/driver/testdata/pprof.cpu.peek @@ -0,0 +1,13 @@ +Showing nodes accounting for 1.12s, 100% of 1.12s total +----------------------------------------------------------+------------- + flat flat% sum% cum cum% calls calls% + context +----------------------------------------------------------+------------- + 1.01s 100% | line2000 testdata/file2000.src (inline) + 0.01s 0.89% 0.89% 1.01s 90.18% | line2001 testdata/file2000.src + 1s 99.01% | line1000 testdata/file1000.src +----------------------------------------------------------+------------- + 1.11s 100% | line3000 testdata/file3000.src (inline) + 0 0% 0.89% 1.11s 99.11% | line3001 testdata/file3000.src + 1.01s 90.99% | line3002 testdata/file3000.src (inline) + 0.10s 9.01% | line1000 testdata/file1000.src +----------------------------------------------------------+------------- diff --git a/src/internal/driver/testdata/pprof.cpu.svg b/src/internal/driver/testdata/pprof.cpu.svg new file mode 100644 index 00000000..70f563fb --- /dev/null +++ b/src/internal/driver/testdata/pprof.cpu.svg @@ -0,0 +1,54 @@ + + + + + + +cppbench_server_main + +cluster_L + + + +File: cppbench_server_main + +File: cppbench_server_main +Type: cpu +0 of 7120000000ns total (0%) +Dropped 56 nodes (cum <= 35600000ns) +Showing top 2 nodes out of 38 (cum >= 7070000000ns) + + +N1 + + +start_thread +0 of 7120000000ns(100%) + + + + +N2 + + +RunWorkerLoop +0 of 7070000000ns(99.30%) + + + + +N1->N2 + + + + + + + 7070000000ns + + + + + diff --git a/src/internal/driver/testdata/pprof.cpu.tags b/src/internal/driver/testdata/pprof.cpu.tags new file mode 100644 index 00000000..fc784f0c --- /dev/null +++ b/src/internal/driver/testdata/pprof.cpu.tags @@ -0,0 +1,13 @@ +key1: Total 1120 + 1000 (89.29%): tag1 + 100 ( 8.93%): tag2 + 10 ( 0.89%): tag3 + 10 ( 0.89%): tag4 + +key2: Total 1020 + 1010 (99.02%): tag1 + 10 ( 0.98%): tag2 + +key3: Total 100 + 100 ( 100%): tag2 + diff --git a/src/internal/driver/testdata/pprof.cpu.tags.focus.ignore b/src/internal/driver/testdata/pprof.cpu.tags.focus.ignore new file mode 100644 index 00000000..650ebb1f --- /dev/null +++ b/src/internal/driver/testdata/pprof.cpu.tags.focus.ignore @@ -0,0 +1,6 @@ +key1: Total 100 + 100 ( 100%): tag2 + +key3: Total 100 + 100 ( 100%): tag2 + diff --git a/src/internal/driver/testdata/pprof.cpu.traces b/src/internal/driver/testdata/pprof.cpu.traces new file mode 100644 index 00000000..d59fe30f --- /dev/null +++ b/src/internal/driver/testdata/pprof.cpu.traces @@ -0,0 +1,32 @@ +File: testbinary +Type: cpu +Duration: 10s, Total samples = 1.12s (11.20%) +-----------+------------------------------------------------------- + key1: tag1 + key2: tag1 + 1s line1000 testdata/file1000.src + line2001 testdata/file2000.src + line2000 testdata/file2000.src + line3002 testdata/file3000.src + line3001 testdata/file3000.src + line3000 testdata/file3000.src 
+-----------+------------------------------------------------------- + key1: tag2 + key3: tag2 + 100ms line1000 testdata/file1000.src + line3001 testdata/file3000.src + line3000 testdata/file3000.src +-----------+------------------------------------------------------- + key1: tag3 + key2: tag2 + 10ms line2001 testdata/file2000.src + line2000 testdata/file2000.src + line3002 testdata/file3000.src + line3000 testdata/file3000.src +-----------+------------------------------------------------------- + key1: tag4 + key2: tag1 + 10ms line3002 testdata/file3000.src + line3001 testdata/file3000.src + line3000 testdata/file3000.src +-----------+------------------------------------------------------- diff --git a/src/internal/driver/testdata/pprof.cpusmall.flat.addresses.tree b/src/internal/driver/testdata/pprof.cpusmall.flat.addresses.tree new file mode 100644 index 00000000..f4254653 --- /dev/null +++ b/src/internal/driver/testdata/pprof.cpusmall.flat.addresses.tree @@ -0,0 +1,17 @@ +Showing nodes accounting for 4s, 100% of 4s total +Showing top 4 nodes out of 5 (cum >= 2s) +----------------------------------------------------------+------------- + flat flat% sum% cum cum% calls calls% + context +----------------------------------------------------------+------------- + 1s 100% | 0000000000003000 [testbinary] + 1s 25.00% 25.00% 1s 25.00% | 0000000000001000 [testbinary] +----------------------------------------------------------+------------- + 1s 25.00% 50.00% 2s 50.00% | 0000000000003000 [testbinary] + 1s 50.00% | 0000000000001000 [testbinary] +----------------------------------------------------------+------------- + 1s 100% | 0000000000005000 [testbinary] + 1s 25.00% 75.00% 1s 25.00% | 0000000000004000 [testbinary] +----------------------------------------------------------+------------- + 1s 25.00% 100% 2s 50.00% | 0000000000005000 [testbinary] + 1s 50.00% | 0000000000004000 [testbinary] +----------------------------------------------------------+------------- diff --git a/src/internal/driver/testdata/pprof.heap.callgrind b/src/internal/driver/testdata/pprof.heap.callgrind new file mode 100644 index 00000000..2859e941 --- /dev/null +++ b/src/internal/driver/testdata/pprof.heap.callgrind @@ -0,0 +1,53 @@ +events: inuse_space(MB) +fl=(1) testdata/file2000.src +fn=(1) line2001 +2 62 +cfl=(2) testdata/file1000.src +cfn=(2) line1000 +calls=0 1 +2 0 + +fl=(3) testdata/file3000.src +fn=(3) line3002 +3 31 +cfl=(1) +cfn=(4) line2000 +calls=0 3 +3 63 + +fl=(2) +fn=(2) +1 4 + +fl=(1) +fn=(4) +3 0 +cfl=(1) +cfn=(1) +calls=0 2 +3 63 + +fl=(3) +fn=(5) line3000 +4 0 +cfl=(3) +cfn=(3) +calls=0 3 +4 62 +cfl=(3) +cfn=(6) line3001 +calls=0 2 +4 36 + +fl=(3) +fn=(6) +2 0 +cfl=(3) +cfn=(3) +calls=0 3 +2 32 +cfl=(2) +cfn=(2) +calls=0 1 +2 3 + diff --git a/src/internal/driver/testdata/pprof.heap.cum.lines.tree.focus b/src/internal/driver/testdata/pprof.heap.cum.lines.tree.focus new file mode 100644 index 00000000..cda6d65b --- /dev/null +++ b/src/internal/driver/testdata/pprof.heap.cum.lines.tree.focus @@ -0,0 +1,19 @@ +Showing nodes accounting for 62.50MB, 63.37% of 98.63MB total +Dropped 2 nodes (cum <= 4.93MB) +----------------------------------------------------------+------------- + flat flat% sum% cum cum% calls calls% + context +----------------------------------------------------------+------------- + 63.48MB 100% | line3002 testdata/file3000.src:3 + 0 0% 0% 63.48MB 64.36% | line2000 testdata/file2000.src:3 + 63.48MB 100% | line2001 testdata/file2000.src:2 (inline) 
+----------------------------------------------------------+------------- + 63.48MB 100% | line2000 testdata/file2000.src:3 (inline) + 62.50MB 63.37% 63.37% 63.48MB 64.36% | line2001 testdata/file2000.src:2 +----------------------------------------------------------+------------- + 0 0% 63.37% 63.48MB 64.36% | line3000 testdata/file3000.src:4 + 63.48MB 100% | line3002 testdata/file3000.src:3 (inline) +----------------------------------------------------------+------------- + 63.48MB 100% | line3000 testdata/file3000.src:4 (inline) + 0 0% 63.37% 63.48MB 64.36% | line3002 testdata/file3000.src:3 + 63.48MB 100% | line2000 testdata/file2000.src:3 +----------------------------------------------------------+------------- diff --git a/src/internal/driver/testdata/pprof.heap.cum.relative_percentages.tree.focus b/src/internal/driver/testdata/pprof.heap.cum.relative_percentages.tree.focus new file mode 100644 index 00000000..35f0bf57 --- /dev/null +++ b/src/internal/driver/testdata/pprof.heap.cum.relative_percentages.tree.focus @@ -0,0 +1,19 @@ +Showing nodes accounting for 62.50MB, 98.46% of 63.48MB total +Dropped 2 nodes (cum <= 3.17MB) +----------------------------------------------------------+------------- + flat flat% sum% cum cum% calls calls% + context +----------------------------------------------------------+------------- + 63.48MB 100% | line3002 testdata/file3000.src + 0 0% 0% 63.48MB 100% | line2000 testdata/file2000.src + 63.48MB 100% | line2001 testdata/file2000.src (inline) +----------------------------------------------------------+------------- + 63.48MB 100% | line2000 testdata/file2000.src (inline) + 62.50MB 98.46% 98.46% 63.48MB 100% | line2001 testdata/file2000.src +----------------------------------------------------------+------------- + 0 0% 98.46% 63.48MB 100% | line3000 testdata/file3000.src + 63.48MB 100% | line3002 testdata/file3000.src (inline) +----------------------------------------------------------+------------- + 63.48MB 100% | line3000 testdata/file3000.src (inline) + 0 0% 98.46% 63.48MB 100% | line3002 testdata/file3000.src + 63.48MB 100% | line2000 testdata/file2000.src +----------------------------------------------------------+------------- diff --git a/src/internal/driver/testdata/pprof.heap.flat.files.seconds.text b/src/internal/driver/testdata/pprof.heap.flat.files.seconds.text new file mode 100644 index 00000000..b9571ef4 --- /dev/null +++ b/src/internal/driver/testdata/pprof.heap.flat.files.seconds.text @@ -0,0 +1,2 @@ +Showing nodes accounting for 0, 0% of 0 total + flat flat% sum% cum cum% diff --git a/src/internal/driver/testdata/pprof.heap.flat.files.text b/src/internal/driver/testdata/pprof.heap.flat.files.text new file mode 100644 index 00000000..fd536df5 --- /dev/null +++ b/src/internal/driver/testdata/pprof.heap.flat.files.text @@ -0,0 +1,5 @@ +Showing nodes accounting for 93.75MB, 95.05% of 98.63MB total +Dropped 1 node (cum <= 4.93MB) + flat flat% sum% cum cum% + 62.50MB 63.37% 63.37% 63.48MB 64.36% testdata/file2000.src + 31.25MB 31.68% 95.05% 98.63MB 100% testdata/file3000.src diff --git a/src/internal/driver/testdata/pprof.heap.flat.inuse_objects.text b/src/internal/driver/testdata/pprof.heap.flat.inuse_objects.text new file mode 100644 index 00000000..bc061ad7 --- /dev/null +++ b/src/internal/driver/testdata/pprof.heap.flat.inuse_objects.text @@ -0,0 +1,8 @@ +Showing nodes accounting for 150, 100% of 150 total + flat flat% sum% cum cum% + 80 53.33% 53.33% 130 86.67% line3002 testdata/file3000.src (inline) + 40 26.67% 80.00% 50 33.33% 
line2001 testdata/file2000.src (inline) + 30 20.00% 100% 30 20.00% line1000 testdata/file1000.src + 0 0% 100% 50 33.33% line2000 testdata/file2000.src + 0 0% 100% 150 100% line3000 testdata/file3000.src + 0 0% 100% 110 73.33% line3001 testdata/file3000.src (inline) diff --git a/src/internal/driver/testdata/pprof.heap.flat.inuse_space.dot.focus b/src/internal/driver/testdata/pprof.heap.flat.inuse_space.dot.focus new file mode 100644 index 00000000..bb1cda8f --- /dev/null +++ b/src/internal/driver/testdata/pprof.heap.flat.inuse_space.dot.focus @@ -0,0 +1,13 @@ +digraph "." { +node [style=filled fillcolor="#f8f8f8"] +subgraph cluster_L { "Build ID: buildid" [shape=box fontsize=16 label="Build ID: buildid\lType: inuse_space\lShowing nodes accounting for 62.50MB, 63.37% of 98.63MB total\l"] } +N1 [label="line2001\nfile2000.src\n62.50MB (63.37%)" fontsize=24 shape=box tooltip="line2001 testdata/file2000.src (62.50MB)" color="#b21600" fillcolor="#edd8d5"] +NN1_0 [label = "1.56MB" fontsize=8 shape=box3d tooltip="62.50MB"] +N1 -> NN1_0 [label=" 62.50MB" weight=100 tooltip="62.50MB" labeltooltip="62.50MB"] +N2 [label="line3000\nfile3000.src\n0 of 62.50MB (63.37%)" fontsize=8 shape=box tooltip="line3000 testdata/file3000.src (62.50MB)" color="#b21600" fillcolor="#edd8d5"] +N3 [label="line2000\nfile2000.src\n0 of 62.50MB (63.37%)" fontsize=8 shape=box tooltip="line2000 testdata/file2000.src (62.50MB)" color="#b21600" fillcolor="#edd8d5"] +N4 [label="line3002\nfile3000.src\n0 of 62.50MB (63.37%)" fontsize=8 shape=box tooltip="line3002 testdata/file3000.src (62.50MB)" color="#b21600" fillcolor="#edd8d5"] +N3 -> N1 [label=" 62.50MB\n (inline)" weight=64 penwidth=4 color="#b21600" tooltip="line2000 testdata/file2000.src -> line2001 testdata/file2000.src (62.50MB)" labeltooltip="line2000 testdata/file2000.src -> line2001 testdata/file2000.src (62.50MB)"] +N2 -> N4 [label=" 62.50MB\n (inline)" weight=64 penwidth=4 color="#b21600" tooltip="line3000 testdata/file3000.src -> line3002 testdata/file3000.src (62.50MB)" labeltooltip="line3000 testdata/file3000.src -> line3002 testdata/file3000.src (62.50MB)"] +N4 -> N3 [label=" 62.50MB" weight=64 penwidth=4 color="#b21600" tooltip="line3002 testdata/file3000.src -> line2000 testdata/file2000.src (62.50MB)" labeltooltip="line3002 testdata/file3000.src -> line2000 testdata/file2000.src (62.50MB)"] +} diff --git a/src/internal/driver/testdata/pprof.heap.flat.inuse_space.dot.focus.ignore b/src/internal/driver/testdata/pprof.heap.flat.inuse_space.dot.focus.ignore new file mode 100644 index 00000000..2d27e94b --- /dev/null +++ b/src/internal/driver/testdata/pprof.heap.flat.inuse_space.dot.focus.ignore @@ -0,0 +1,15 @@ +digraph "." 
{ +node [style=filled fillcolor="#f8f8f8"] +subgraph cluster_L { "Build ID: buildid" [shape=box fontsize=16 label="Build ID: buildid\lType: inuse_space\lShowing nodes accounting for 36.13MB, 36.63% of 98.63MB total\lDropped 2 nodes (cum <= 4.93MB)\l"] } +N1 [label="line3002\nfile3000.src\n31.25MB (31.68%)\nof 32.23MB (32.67%)" fontsize=24 shape=box tooltip="line3002 testdata/file3000.src (32.23MB)" color="#b23200" fillcolor="#eddcd5"] +NN1_0 [label = "400kB" fontsize=8 shape=box3d tooltip="31.25MB"] +N1 -> NN1_0 [label=" 31.25MB" weight=100 tooltip="31.25MB" labeltooltip="31.25MB"] +N2 [label="line3000\nfile3000.src\n0 of 36.13MB (36.63%)" fontsize=8 shape=box tooltip="line3000 testdata/file3000.src (36.13MB)" color="#b22e00" fillcolor="#eddbd5"] +N3 [label="line3001\nfile3000.src\n0 of 36.13MB (36.63%)" fontsize=8 shape=box tooltip="line3001 testdata/file3000.src (36.13MB)" color="#b22e00" fillcolor="#eddbd5"] +N4 [label="line1000\nfile1000.src\n4.88MB (4.95%)" fontsize=15 shape=box tooltip="line1000 testdata/file1000.src (4.88MB)" color="#b2a086" fillcolor="#edeae7"] +NN4_0 [label = "200kB" fontsize=8 shape=box3d tooltip="3.91MB"] +N4 -> NN4_0 [label=" 3.91MB" weight=100 tooltip="3.91MB" labeltooltip="3.91MB"] +N2 -> N3 [label=" 36.13MB\n (inline)" weight=37 penwidth=2 color="#b22e00" tooltip="line3000 testdata/file3000.src -> line3001 testdata/file3000.src (36.13MB)" labeltooltip="line3000 testdata/file3000.src -> line3001 testdata/file3000.src (36.13MB)"] +N3 -> N1 [label=" 32.23MB\n (inline)" weight=33 penwidth=2 color="#b23200" tooltip="line3001 testdata/file3000.src -> line3002 testdata/file3000.src (32.23MB)" labeltooltip="line3001 testdata/file3000.src -> line3002 testdata/file3000.src (32.23MB)"] +N3 -> N4 [label=" 3.91MB" weight=4 color="#b2a58f" tooltip="line3001 testdata/file3000.src -> line1000 testdata/file1000.src (3.91MB)" labeltooltip="line3001 testdata/file3000.src -> line1000 testdata/file1000.src (3.91MB)"] +} diff --git a/src/internal/driver/testdata/pprof.heap.flat.lines.dot.focus b/src/internal/driver/testdata/pprof.heap.flat.lines.dot.focus new file mode 100644 index 00000000..d9e2a60e --- /dev/null +++ b/src/internal/driver/testdata/pprof.heap.flat.lines.dot.focus @@ -0,0 +1,21 @@ +digraph "." 
{ +node [style=filled fillcolor="#f8f8f8"] +subgraph cluster_L { "Build ID: buildid" [shape=box fontsize=16 label="Build ID: buildid\lType: inuse_space\lShowing nodes accounting for 67.38MB, 68.32% of 98.63MB total\l"] } +N1 [label="line3000\nfile3000.src:4\n0 of 67.38MB (68.32%)" fontsize=8 shape=box tooltip="line3000 testdata/file3000.src:4 (67.38MB)" color="#b21300" fillcolor="#edd7d5"] +N2 [label="line2001\nfile2000.src:2\n62.50MB (63.37%)\nof 63.48MB (64.36%)" fontsize=24 shape=box tooltip="line2001 testdata/file2000.src:2 (63.48MB)" color="#b21600" fillcolor="#edd8d5"] +NN2_0 [label = "1.56MB" fontsize=8 shape=box3d tooltip="62.50MB"] +N2 -> NN2_0 [label=" 62.50MB" weight=100 tooltip="62.50MB" labeltooltip="62.50MB"] +N3 [label="line1000\nfile1000.src:1\n4.88MB (4.95%)" fontsize=13 shape=box tooltip="line1000 testdata/file1000.src:1 (4.88MB)" color="#b2a086" fillcolor="#edeae7"] +NN3_0 [label = "200kB" fontsize=8 shape=box3d tooltip="3.91MB"] +N3 -> NN3_0 [label=" 3.91MB" weight=100 tooltip="3.91MB" labeltooltip="3.91MB"] +N4 [label="line3002\nfile3000.src:3\n0 of 63.48MB (64.36%)" fontsize=8 shape=box tooltip="line3002 testdata/file3000.src:3 (63.48MB)" color="#b21600" fillcolor="#edd8d5"] +N5 [label="line3001\nfile3000.src:2\n0 of 4.88MB (4.95%)" fontsize=8 shape=box tooltip="line3001 testdata/file3000.src:2 (4.88MB)" color="#b2a086" fillcolor="#edeae7"] +N6 [label="line2000\nfile2000.src:3\n0 of 63.48MB (64.36%)" fontsize=8 shape=box tooltip="line2000 testdata/file2000.src:3 (63.48MB)" color="#b21600" fillcolor="#edd8d5"] +N6 -> N2 [label=" 63.48MB\n (inline)" weight=65 penwidth=4 color="#b21600" tooltip="line2000 testdata/file2000.src:3 -> line2001 testdata/file2000.src:2 (63.48MB)" labeltooltip="line2000 testdata/file2000.src:3 -> line2001 testdata/file2000.src:2 (63.48MB)"] +N4 -> N6 [label=" 63.48MB" weight=65 penwidth=4 color="#b21600" tooltip="line3002 testdata/file3000.src:3 -> line2000 testdata/file2000.src:3 (63.48MB)" labeltooltip="line3002 testdata/file3000.src:3 -> line2000 testdata/file2000.src:3 (63.48MB)"] +N1 -> N4 [label=" 62.50MB\n (inline)" weight=64 penwidth=4 color="#b21600" tooltip="line3000 testdata/file3000.src:4 -> line3002 testdata/file3000.src:3 (62.50MB)" labeltooltip="line3000 testdata/file3000.src:4 -> line3002 testdata/file3000.src:3 (62.50MB)"] +N1 -> N5 [label=" 4.88MB\n (inline)" weight=5 color="#b2a086" tooltip="line3000 testdata/file3000.src:4 -> line3001 testdata/file3000.src:2 (4.88MB)" labeltooltip="line3000 testdata/file3000.src:4 -> line3001 testdata/file3000.src:2 (4.88MB)"] +N5 -> N3 [label=" 3.91MB" weight=4 color="#b2a58f" tooltip="line3001 testdata/file3000.src:2 -> line1000 testdata/file1000.src:1 (3.91MB)" labeltooltip="line3001 testdata/file3000.src:2 -> line1000 testdata/file1000.src:1 (3.91MB)"] +N2 -> N3 [label=" 0.98MB" color="#b2b0a9" tooltip="line2001 testdata/file2000.src:2 -> line1000 testdata/file1000.src:1 (0.98MB)" labeltooltip="line2001 testdata/file2000.src:2 -> line1000 testdata/file1000.src:1 (0.98MB)" minlen=2] +N5 -> N4 [label=" 0.98MB\n (inline)" color="#b2b0a9" tooltip="line3001 testdata/file3000.src:2 -> line3002 testdata/file3000.src:3 (0.98MB)" labeltooltip="line3001 testdata/file3000.src:2 -> line3002 testdata/file3000.src:3 (0.98MB)"] +} diff --git a/src/internal/driver/testdata/pprof.heap_alloc.flat.alloc_objects.text b/src/internal/driver/testdata/pprof.heap_alloc.flat.alloc_objects.text new file mode 100644 index 00000000..bc061ad7 --- /dev/null +++ 
b/src/internal/driver/testdata/pprof.heap_alloc.flat.alloc_objects.text @@ -0,0 +1,8 @@ +Showing nodes accounting for 150, 100% of 150 total + flat flat% sum% cum cum% + 80 53.33% 53.33% 130 86.67% line3002 testdata/file3000.src (inline) + 40 26.67% 80.00% 50 33.33% line2001 testdata/file2000.src (inline) + 30 20.00% 100% 30 20.00% line1000 testdata/file1000.src + 0 0% 100% 50 33.33% line2000 testdata/file2000.src + 0 0% 100% 150 100% line3000 testdata/file3000.src + 0 0% 100% 110 73.33% line3001 testdata/file3000.src (inline) diff --git a/src/internal/driver/testdata/pprof.heap_alloc.flat.alloc_space.dot.focus b/src/internal/driver/testdata/pprof.heap_alloc.flat.alloc_space.dot.focus new file mode 100644 index 00000000..a878ce46 --- /dev/null +++ b/src/internal/driver/testdata/pprof.heap_alloc.flat.alloc_space.dot.focus @@ -0,0 +1,18 @@ +digraph "." { +node [style=filled fillcolor="#f8f8f8"] +subgraph cluster_L { "Build ID: buildid" [shape=box fontsize=16 label="Build ID: buildid\lType: alloc_space\lShowing nodes accounting for 93.75MB, 95.05% of 98.63MB total\lDropped 1 node (cum <= 4.93MB)\l"] } +N1 [label="line3002\nfile3000.src\n31.25MB (31.68%)\nof 94.73MB (96.04%)" fontsize=20 shape=box tooltip="line3002 testdata/file3000.src (94.73MB)" color="#b20200" fillcolor="#edd5d5"] +NN1_0 [label = "400kB" fontsize=8 shape=box3d tooltip="31.25MB"] +N1 -> NN1_0 [label=" 31.25MB" weight=100 tooltip="31.25MB" labeltooltip="31.25MB"] +N2 [label="line3000\nfile3000.src\n0 of 98.63MB (100%)" fontsize=8 shape=box tooltip="line3000 testdata/file3000.src (98.63MB)" color="#b20000" fillcolor="#edd5d5"] +N3 [label="line2001\nfile2000.src\n62.50MB (63.37%)\nof 63.48MB (64.36%)" fontsize=24 shape=box tooltip="line2001 testdata/file2000.src (63.48MB)" color="#b21600" fillcolor="#edd8d5"] +NN3_0 [label = "1.56MB" fontsize=8 shape=box3d tooltip="62.50MB"] +N3 -> NN3_0 [label=" 62.50MB" weight=100 tooltip="62.50MB" labeltooltip="62.50MB"] +N4 [label="line2000\nfile2000.src\n0 of 63.48MB (64.36%)" fontsize=8 shape=box tooltip="line2000 testdata/file2000.src (63.48MB)" color="#b21600" fillcolor="#edd8d5"] +N5 [label="line3001\nfile3000.src\n0 of 36.13MB (36.63%)" fontsize=8 shape=box tooltip="line3001 testdata/file3000.src (36.13MB)" color="#b22e00" fillcolor="#eddbd5"] +N4 -> N3 [label=" 63.48MB\n (inline)" weight=65 penwidth=4 color="#b21600" tooltip="line2000 testdata/file2000.src -> line2001 testdata/file2000.src (63.48MB)" labeltooltip="line2000 testdata/file2000.src -> line2001 testdata/file2000.src (63.48MB)"] +N1 -> N4 [label=" 63.48MB" weight=65 penwidth=4 color="#b21600" tooltip="line3002 testdata/file3000.src -> line2000 testdata/file2000.src (63.48MB)" labeltooltip="line3002 testdata/file3000.src -> line2000 testdata/file2000.src (63.48MB)" minlen=2] +N2 -> N1 [label=" 62.50MB\n (inline)" weight=64 penwidth=4 color="#b21600" tooltip="line3000 testdata/file3000.src -> line3002 testdata/file3000.src (62.50MB)" labeltooltip="line3000 testdata/file3000.src -> line3002 testdata/file3000.src (62.50MB)"] +N2 -> N5 [label=" 36.13MB\n (inline)" weight=37 penwidth=2 color="#b22e00" tooltip="line3000 testdata/file3000.src -> line3001 testdata/file3000.src (36.13MB)" labeltooltip="line3000 testdata/file3000.src -> line3001 testdata/file3000.src (36.13MB)"] +N5 -> N1 [label=" 32.23MB\n (inline)" weight=33 penwidth=2 color="#b23200" tooltip="line3001 testdata/file3000.src -> line3002 testdata/file3000.src (32.23MB)" labeltooltip="line3001 testdata/file3000.src -> line3002 testdata/file3000.src (32.23MB)"] +} diff 
--git a/src/internal/driver/testdata/pprof.heap_alloc.flat.alloc_space.dot.hide b/src/internal/driver/testdata/pprof.heap_alloc.flat.alloc_space.dot.hide new file mode 100644 index 00000000..6cf8aeca --- /dev/null +++ b/src/internal/driver/testdata/pprof.heap_alloc.flat.alloc_space.dot.hide @@ -0,0 +1,11 @@ +digraph "." { +node [style=filled fillcolor="#f8f8f8"] +subgraph cluster_L { "Build ID: buildid" [shape=box fontsize=16 label="Build ID: buildid\lType: alloc_space\lShowing nodes accounting for 93.75MB, 95.05% of 98.63MB total\lDropped 1 node (cum <= 4.93MB)\l"] } +N1 [label="line3000\nfile3000.src\n62.50MB (63.37%)\nof 98.63MB (100%)" fontsize=24 shape=box tooltip="line3000 testdata/file3000.src (98.63MB)" color="#b20000" fillcolor="#edd5d5"] +NN1_0 [label = "1.56MB" fontsize=8 shape=box3d tooltip="62.50MB"] +N1 -> NN1_0 [label=" 62.50MB" weight=100 tooltip="62.50MB" labeltooltip="62.50MB"] +N2 [label="line3001\nfile3000.src\n31.25MB (31.68%)\nof 36.13MB (36.63%)" fontsize=20 shape=box tooltip="line3001 testdata/file3000.src (36.13MB)" color="#b22e00" fillcolor="#eddbd5"] +NN2_0 [label = "400kB" fontsize=8 shape=box3d tooltip="31.25MB"] +N2 -> NN2_0 [label=" 31.25MB" weight=100 tooltip="31.25MB" labeltooltip="31.25MB"] +N1 -> N2 [label=" 36.13MB\n (inline)" weight=37 penwidth=2 color="#b22e00" tooltip="line3000 testdata/file3000.src -> line3001 testdata/file3000.src (36.13MB)" labeltooltip="line3000 testdata/file3000.src -> line3001 testdata/file3000.src (36.13MB)" minlen=2] +} diff --git a/src/internal/driver/testdata/pprof.unknown.flat.functions.text b/src/internal/driver/testdata/pprof.unknown.flat.functions.text new file mode 100644 index 00000000..0807ed23 --- /dev/null +++ b/src/internal/driver/testdata/pprof.unknown.flat.functions.text @@ -0,0 +1,8 @@ +Showing nodes accounting for 1.12s, 100% of 1.12s total + flat flat% sum% cum cum% + 1.10s 98.21% 98.21% 1.10s 98.21% line1000 testdata/file1000.src + 0.01s 0.89% 99.11% 1.01s 90.18% line2001 testdata/file2000.src (inline) + 0.01s 0.89% 100% 1.02s 91.07% line3002 testdata/file3000.src (inline) + 0 0% 100% 1.01s 90.18% line2000 testdata/file2000.src + 0 0% 100% 1.12s 100% line3000 testdata/file3000.src + 0 0% 100% 1.11s 99.11% line3001 testdata/file3000.src (inline) diff --git a/src/internal/driver/testdata/wrapper/addr2line b/src/internal/driver/testdata/wrapper/addr2line new file mode 100755 index 00000000..ecbea0b5 --- /dev/null +++ b/src/internal/driver/testdata/wrapper/addr2line @@ -0,0 +1,85 @@ +#!/bin/bash +# Copyright 2014 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# addr2line stub for testing of addr2liner. +# Will recognize (and ignore) the -aiCfej options. +# +# Accepts addresses 1000 to 9000 and output multiple frames of the form: +# 0x9000/fun9000/file9000:9000 +# 0x8000/fun8000/file8000:8000 +# 0x7000/fun7000/file7000:7000 +# ... +# 0x1000/fun1000/file1000:1000 +# +# Returns ??/??/??:0 for all other inputs. 
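+#
+# As an illustrative example (hypothetical invocation), echoing the address
+# 0x3000 into this stub prints the echoed address followed by the synthetic
+# frame chain defined below:
+#   0x3000
+#   _ZNSt6vectorIS_IS_IiSaIiEESaIS1_EESaIS3_EEixEm
+#   file3000:3000
+#   _ZNSaIiEC1Ev.clone18
+#   file2000:2000
+#   _Z3fooid.clone2
+#   file1000:1000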
+ +while getopts aiCfe:j: opt; do + case "$opt" in + a|i|C|f|e|j) ;; + *) + echo "unrecognized option: $1" >&2 + exit 1 + esac +done + +while read input +do + address="$input" + + # remove 0x from input. + case "${address}" in + 0x*) + address=$(printf '%x' "$address") + ;; + *) + address=$(printf '%x' "0x$address") + esac + + printf '0x%x\n' "0x$address" + loop=1 + while [ $loop -eq 1 ] + do + # prepare default output. + output2="fun${address}" + output3="file${address}:${address}" + + # specialize output for selected cases. + case "${address}" in + 1000) + output2="_Z3fooid.clone2" + loop=0 + ;; + 2000) + output2="_ZNSaIiEC1Ev.clone18" + address=1000 + ;; + 3000) + output2="_ZNSt6vectorIS_IS_IiSaIiEESaIS1_EESaIS3_EEixEm" + address=2000 + ;; + [4-9]000) + address=$(expr ${address} - 1000) + ;; + *) + output2='??' + output3='??:0' + loop=0 + esac + + echo "$output2" + echo "$output3" + done +done +exit 0 diff --git a/src/internal/driver/testdata/wrapper/dot b/src/internal/driver/testdata/wrapper/dot new file mode 100755 index 00000000..96692999 --- /dev/null +++ b/src/internal/driver/testdata/wrapper/dot @@ -0,0 +1,29 @@ +#!/bin/bash +# Copyright 2014 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +case "$1" in + "-Tsvg" ) + if ! grep -q 'Type: cpu.*Duration: 10s' ; then + echo "Couldn't recognize dot input" >&2 + exit 1 + fi + cat testdata/cppbench.svg + exit 0 + ;; + * ) + echo "Unexpected argument $1" >&2 + exit 1 + ;; +esac diff --git a/src/internal/driver/testdata/wrapper/nm b/src/internal/driver/testdata/wrapper/nm new file mode 100755 index 00000000..c2bcbea7 --- /dev/null +++ b/src/internal/driver/testdata/wrapper/nm @@ -0,0 +1,62 @@ +#!/bin/bash +# Copyright 2014 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# nm stub for testing of listing. +# Will recognize (and ignore) the -nC options. +# +# Outputs fixed nm output. + +while getopts nC opt; do + case "$opt" in + n) ;; + C) demangle=1;; + *) + echo "unrecognized option: $1" >&2 + exit 1 + esac +done + +if [ $demangle ] +then + cat < uint64(maxNoteSize) { + return nil, fmt.Errorf("note name too long (%d bytes)", namesz) + } + var name string + if namesz > 0 { + // Documentation differs as to whether namesz is meant to include the + // trailing zero, but everyone agrees that name is null-terminated. + // So we'll just determine the actual length after the fact. 
+ var err error + name, err = r.ReadString('\x00') + if err == io.EOF { + return nil, fmt.Errorf("missing note name (want %d bytes)", namesz) + } else if err != nil { + return nil, err + } + namesz = uint32(len(name)) + name = name[:len(name)-1] + } + + // Drop padding bytes until the desc field. + for n := padding(len(noteHeader) + int(namesz)); n > 0; n-- { + if _, err := r.ReadByte(); err == io.EOF { + return nil, fmt.Errorf( + "missing %d bytes of padding after note name", n) + } else if err != nil { + return nil, err + } + } + + if uint64(descsz) > uint64(maxNoteSize) { + return nil, fmt.Errorf("note desc too long (%d bytes)", descsz) + } + desc := make([]byte, int(descsz)) + if _, err := io.ReadFull(r, desc); err == io.EOF { + return nil, fmt.Errorf("missing desc (want %d bytes)", len(desc)) + } else if err != nil { + return nil, err + } + + notes = append(notes, elfNote{Name: name, Desc: desc, Type: typ}) + + // Drop padding bytes until the next note or the end of the section, + // whichever comes first. + for n := padding(len(desc)); n > 0; n-- { + if _, err := r.ReadByte(); err == io.EOF { + // We hit the end of the section before an alignment boundary. + // This can happen if this section is at the end of the file or the next + // section has a smaller alignment requirement. + break + } else if err != nil { + return nil, err + } + } + } + return notes, nil +} + +// GetBuildID returns the GNU build-ID for an ELF binary. +// +// If no build-ID was found but the binary was read without error, it returns +// (nil, nil). +func GetBuildID(binary io.ReaderAt) ([]byte, error) { + f, err := elf.NewFile(binary) + if err != nil { + return nil, err + } + + findBuildID := func(notes []elfNote) ([]byte, error) { + var buildID []byte + for _, note := range notes { + if note.Name == "GNU" && note.Type == noteTypeGNUBuildID { + if buildID == nil { + buildID = note.Desc + } else { + return nil, fmt.Errorf("multiple build ids found, don't know which to use!") + } + } + } + return buildID, nil + } + + for _, p := range f.Progs { + if p.Type != elf.PT_NOTE { + continue + } + notes, err := parseNotes(p.Open(), int(p.Align), f.ByteOrder) + if err != nil { + return nil, err + } + if b, err := findBuildID(notes); b != nil || err != nil { + return b, err + } + } + for _, s := range f.Sections { + if s.Type != elf.SHT_NOTE { + continue + } + notes, err := parseNotes(s.Open(), int(s.Addralign), f.ByteOrder) + if err != nil { + return nil, err + } + if b, err := findBuildID(notes); b != nil || err != nil { + return b, err + } + } + return nil, nil +} + +// GetBase determines the base address to subtract from virtual +// address to get symbol table address. For an executable, the base +// is 0. Otherwise, it's a shared library, and the base is the +// address where the mapping starts. The kernel is special, and may +// use the address of the _stext symbol as the mmap start. _stext +// offset can be obtained with `nm vmlinux | grep _stext` +func GetBase(fh *elf.FileHeader, loadSegment *elf.ProgHeader, stextOffset *uint64, start, limit, offset uint64) (uint64, error) { + const pageSize = 4096 + + if start == 0 && offset == 0 && + (limit == ^uint64(0) || limit == 0) { + // Some tools may introduce a fake mapping that spans the entire + // address space. Assume that the address has already been + // adjusted, so no additional base adjustment is necessary. + return 0, nil + } + + switch fh.Type { + case elf.ET_EXEC: + if loadSegment == nil { + // Fixed-address executable, no adjustment. 
+			return 0, nil
+		}
+		if start == 0 && limit != 0 {
+			// ChromeOS remaps its kernel to 0. Nothing else should come
+			// down this path. Empirical values:
+			// VADDR=0xffffffff80200000
+			// stextOffset=0xffffffff80200198
+			if stextOffset != nil {
+				return -*stextOffset, nil
+			}
+			return -loadSegment.Vaddr, nil
+		}
+		if loadSegment.Vaddr-loadSegment.Off == start-offset {
+			return offset, nil
+		}
+		if loadSegment.Vaddr == start-offset {
+			return offset, nil
+		}
+		if start > loadSegment.Vaddr && limit > start && offset == 0 {
+			// Some kernels look like:
+			// VADDR=0xffffffff80200000
+			// stextOffset=0xffffffff80200198
+			// Start=0xffffffff83200000
+			// Limit=0xffffffff84200000
+			// Offset=0
+			// So the base should be:
+			if stextOffset != nil && (start%pageSize) == (*stextOffset%pageSize) {
+				// perf uses the address of _stext as start. Some tools may
+				// adjust for this before calling GetBase, in which case the page
+				// alignment should be different from that of stextOffset.
+				return start - *stextOffset, nil
+			}
+
+			return start - loadSegment.Vaddr, nil
+		} else if start < loadSegment.Vaddr && start%pageSize != 0 && stextOffset != nil && *stextOffset%pageSize == start%pageSize {
+			// ChromeOS remaps its kernel to 0 + start%pageSize. Nothing
+			// else should come down this path. Empirical values:
+			// start=0x198 limit=0x2f9fffff offset=0
+			// VADDR=0xffffffff81000000
+			// stextOffset=0xffffffff81000198
+			return -(*stextOffset - start), nil
+		}
+
+		return 0, fmt.Errorf("Don't know how to handle EXEC segment: %v start=0x%x limit=0x%x offset=0x%x", *loadSegment, start, limit, offset)
+	case elf.ET_REL:
+		if offset != 0 {
+			return 0, fmt.Errorf("Don't know how to handle mapping.Offset")
+		}
+		return start, nil
+	case elf.ET_DYN:
+		if offset != 0 {
+			return 0, fmt.Errorf("Don't know how to handle mapping.Offset")
+		}
+		if loadSegment == nil {
+			return start, nil
+		}
+		return start - loadSegment.Vaddr, nil
+	}
+	return 0, fmt.Errorf("Don't know how to handle FileHeader.Type %v", fh.Type)
+}
diff --git a/src/internal/elfexec/elfexec_test.go b/src/internal/elfexec/elfexec_test.go
new file mode 100644
index 00000000..d8ffd445
--- /dev/null
+++ b/src/internal/elfexec/elfexec_test.go
@@ -0,0 +1,87 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +package elfexec + +import ( + "debug/elf" + "testing" +) + +func TestGetBase(t *testing.T) { + + fhExec := &elf.FileHeader{ + Type: elf.ET_EXEC, + } + fhRel := &elf.FileHeader{ + Type: elf.ET_REL, + } + fhDyn := &elf.FileHeader{ + Type: elf.ET_DYN, + } + lsOffset := &elf.ProgHeader{ + Vaddr: 0x400000, + Off: 0x200000, + } + kernelHeader := &elf.ProgHeader{ + Vaddr: 0xffffffff81000000, + } + + testcases := []struct { + label string + fh *elf.FileHeader + loadSegment *elf.ProgHeader + stextOffset *uint64 + start, limit, offset uint64 + want uint64 + wanterr bool + }{ + {"exec", fhExec, nil, nil, 0x400000, 0, 0, 0, false}, + {"exec offset", fhExec, lsOffset, nil, 0x400000, 0x800000, 0, 0, false}, + {"exec offset 2", fhExec, lsOffset, nil, 0x200000, 0x600000, 0, 0, false}, + {"exec nomap", fhExec, nil, nil, 0, 0, 0, 0, false}, + {"exec kernel", fhExec, kernelHeader, uint64p(0xffffffff81000198), 0xffffffff82000198, 0xffffffff83000198, 0, 0x1000000, false}, + {"exec chromeos kernel", fhExec, kernelHeader, uint64p(0xffffffff81000198), 0, 0x10197, 0, 0x7efffe68, false}, + {"exec chromeos kernel 2", fhExec, kernelHeader, uint64p(0xffffffff81000198), 0, 0x10198, 0, 0x7efffe68, false}, + {"exec chromeos kernel 3", fhExec, kernelHeader, uint64p(0xffffffff81000198), 0x198, 0x100000, 0, 0x7f000000, false}, + {"exec chromeos kernel 4", fhExec, kernelHeader, uint64p(0xffffffff81200198), 0x198, 0x100000, 0, 0x7ee00000, false}, + {"dyn", fhDyn, nil, nil, 0x200000, 0x300000, 0, 0x200000, false}, + {"dyn offset", fhDyn, lsOffset, nil, 0x0, 0x300000, 0, 0xFFFFFFFFFFC00000, false}, + {"dyn nomap", fhDyn, nil, nil, 0x0, 0x0, 0, 0, false}, + {"rel", fhRel, nil, nil, 0x2000000, 0x3000000, 0, 0x2000000, false}, + {"rel nomap", fhRel, nil, nil, 0x0, ^uint64(0), 0, 0, false}, + {"rel offset", fhRel, nil, nil, 0x100000, 0x200000, 0x1, 0, true}, + } + + for _, tc := range testcases { + base, err := GetBase(tc.fh, tc.loadSegment, tc.stextOffset, tc.start, tc.limit, tc.offset) + if err != nil { + if !tc.wanterr { + t.Errorf("%s: want no error, got %v", tc.label, err) + } + continue + } + if tc.wanterr { + t.Errorf("%s: want error, got nil", tc.label) + continue + } + if base != tc.want { + t.Errorf("%s: want %x, got %x", tc.label, tc.want, base) + } + } +} + +func uint64p(n uint64) *uint64 { + return &n +} diff --git a/src/internal/graph/dotgraph.go b/src/internal/graph/dotgraph.go new file mode 100644 index 00000000..1f419524 --- /dev/null +++ b/src/internal/graph/dotgraph.go @@ -0,0 +1,469 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package graph + +import ( + "fmt" + "io" + "math" + "path/filepath" + "strings" + + "internal/measurement" +) + +// DotAttributes contains details about the graph itself, giving +// insight into how its elements should be rendered. 
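+// As an illustrative sketch (not taken from any caller in this change), a
+// driver could emphasize a single node n of graph g and render it with:
+//
+//	a := &DotAttributes{Nodes: map[*Node]*DotNodeAttributes{
+//		n: {Bold: true, Peripheries: 2},
+//	}}
+//	ComposeDot(w, g, a, &DotConfig{Title: "profile", FormatValue: format, Total: total})
+//
+// where w, format and total are assumed to be provided by the caller.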
+type DotAttributes struct { + Nodes map[*Node]*DotNodeAttributes // A map allowing each Node to have its own visualization option +} + +// DotNodeAttributes contains Node specific visualization options. +type DotNodeAttributes struct { + Shape string // The optional shape of the node when rendered visually + Bold bool // If the node should be bold or not + Peripheries int // An optional number of borders to place around a node + URL string // An optional url link to add to a node + Formatter func(*NodeInfo) string // An optional formatter for the node's label +} + +// DotConfig contains attributes about how a graph should be +// constructed and how it should look. +type DotConfig struct { + Title string // The title of the DOT graph + Labels []string // The labels for the DOT's legend + + FormatValue func(int64) string // A formatting function for values + Total int64 // The total weight of the graph, used to compute percentages +} + +// Compose creates and writes a in the DOT format to the writer, using +// the configurations given. +func ComposeDot(w io.Writer, g *Graph, a *DotAttributes, c *DotConfig) { + builder := &builder{w, a, c} + + // Begin constructing DOT by adding a title and legend. + builder.start() + defer builder.finish() + builder.addLegend() + + if len(g.Nodes) == 0 { + return + } + + // Preprocess graph to get id map and find max flat. + nodeIDMap := make(map[*Node]int) + hasNodelets := make(map[*Node]bool) + + maxFlat := float64(abs64(g.Nodes[0].Flat)) + for i, n := range g.Nodes { + nodeIDMap[n] = i + 1 + if float64(abs64(n.Flat)) > maxFlat { + maxFlat = float64(abs64(n.Flat)) + } + } + + edges := EdgeMap{} + + // Add nodes and nodelets to DOT builder. + for _, n := range g.Nodes { + builder.addNode(n, nodeIDMap[n], maxFlat) + hasNodelets[n] = builder.addNodelets(n, nodeIDMap[n]) + + // Collect all edges. Use a fake node to support multiple incoming edges. + for _, e := range n.Out { + edges[&Node{}] = e + } + } + + // Add edges to DOT builder. Sort edges by frequency as a hint to the graph layout engine. + for _, e := range edges.Sort() { + builder.addEdge(e, nodeIDMap[e.Src], nodeIDMap[e.Dest], hasNodelets[e.Src]) + } +} + +// builder wraps an io.Writer and understands how to compose DOT formatted elements. +type builder struct { + io.Writer + attributes *DotAttributes + config *DotConfig +} + +// start generates a title and initial node in DOT format. +func (b *builder) start() { + graphname := "unnamed" + if b.config.Title != "" { + graphname = b.config.Title + } + fmt.Fprintln(b, `digraph "`+graphname+`" {`) + fmt.Fprintln(b, `node [style=filled fillcolor="#f8f8f8"]`) +} + +// finish closes the opening curly bracket in the constructed DOT buffer. +func (b *builder) finish() { + fmt.Fprintln(b, "}") +} + +// addLegend generates a legend in DOT format. +func (b *builder) addLegend() { + labels := b.config.Labels + var title string + if len(labels) > 0 { + title = labels[0] + } + fmt.Fprintf(b, `subgraph cluster_L { "%s" [shape=box fontsize=16 label="%s\l"] }`+"\n", title, strings.Join(labels, `\l`)) +} + +// addNode generates a graph node in DOT format. +func (b *builder) addNode(node *Node, nodeID int, maxFlat float64) { + flat, cum := node.Flat, node.Cum + attrs := b.attributes.Nodes[node] + + // Populate label for node. 
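// Worked example (annotation, not part of this patch): with the two-node
// graph from dotgraph_test.go (Total=100), the "src" node has Flat=10 and
// Cum=25, so the label built below becomes
//   src\n10 (10.00%)\nof 25 (25.00%)
// exactly the N1 label in testdata/compose1.dot. A node whose Flat is zero
// instead gets "0" followed by " of <cum> (..%)", as N2 does in
// testdata/compose2.dot. The font scaling further down then yields
// fontsize=22 for src and 24 for dest (maxFlat=15).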
+ var label string + if attrs != nil && attrs.Formatter != nil { + label = attrs.Formatter(&node.Info) + } else { + label = multilinePrintableName(&node.Info) + } + + flatValue := b.config.FormatValue(flat) + if flat != 0 { + label = label + fmt.Sprintf(`%s (%s)`, + flatValue, + strings.TrimSpace(percentage(flat, b.config.Total))) + } else { + label = label + "0" + } + cumValue := flatValue + if cum != flat { + if flat != 0 { + label = label + `\n` + } else { + label = label + " " + } + cumValue = b.config.FormatValue(cum) + label = label + fmt.Sprintf(`of %s (%s)`, + cumValue, + strings.TrimSpace(percentage(cum, b.config.Total))) + } + + // Scale font sizes from 8 to 24 based on percentage of flat frequency. + // Use non linear growth to emphasize the size difference. + baseFontSize, maxFontGrowth := 8, 16.0 + fontSize := baseFontSize + if maxFlat != 0 && flat != 0 && float64(abs64(flat)) <= maxFlat { + fontSize += int(math.Ceil(maxFontGrowth * math.Sqrt(float64(abs64(flat))/maxFlat))) + } + + // Determine node shape. + shape := "box" + if attrs != nil && attrs.Shape != "" { + shape = attrs.Shape + } + + // Create DOT attribute for node. + attr := fmt.Sprintf(`label="%s" fontsize=%d shape=%s tooltip="%s (%s)" color="%s" fillcolor="%s"`, + label, fontSize, shape, node.Info.PrintableName(), cumValue, + dotColor(float64(node.Cum)/float64(abs64(b.config.Total)), false), + dotColor(float64(node.Cum)/float64(abs64(b.config.Total)), true)) + + // Add on extra attributes if provided. + if attrs != nil { + // Make bold if specified. + if attrs.Bold { + attr += ` style="bold,filled"` + } + + // Add peripheries if specified. + if attrs.Peripheries != 0 { + attr += fmt.Sprintf(` peripheries=%d`, attrs.Peripheries) + } + + // Add URL if specified. target="_blank" forces the link to open in a new tab. + if attrs.URL != "" { + attr += fmt.Sprintf(` URL="%s" target="_blank"`, attrs.URL) + } + } + + fmt.Fprintf(b, "N%d [%s]\n", nodeID, attr) +} + +// addNodelets generates the DOT boxes for the node tags if they exist. +func (b *builder) addNodelets(node *Node, nodeID int) bool { + const maxNodelets = 4 // Number of nodelets for alphanumeric labels + const maxNumNodelets = 4 // Number of nodelets for numeric labels + var nodelets string + + // Populate two Tag slices, one for LabelTags and one for NumericTags. + var ts []*Tag + lnts := make(map[string][]*Tag, 0) + for _, t := range node.LabelTags { + ts = append(ts, t) + } + for l, tm := range node.NumericTags { + for _, t := range tm { + lnts[l] = append(lnts[l], t) + } + } + + // For leaf nodes, print cumulative tags (includes weight from + // children that have been deleted). + // For internal nodes, print only flat tags. + flatTags := len(node.Out) > 0 + + // Select the top maxNodelets alphanumeric labels by weight. 
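// Worked example (annotation, not part of this patch): in
// TestComposeWithTagsAndResidualEdge the "src" node carries the label tag
// "tag1" (weight 10) and an un-nested numeric tag "tag2" (20ms). The loop
// below emits the tag1 box and its edge, and numericNodelets then attaches
// the tag2 box directly to N1, giving the N1_0 and NN1_0 nodelets recorded
// in testdata/compose3.dot.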
+ SortTags(ts, flatTags) + if len(ts) > maxNodelets { + ts = ts[:maxNodelets] + } + for i, t := range ts { + w := t.Cum + if flatTags { + w = t.Flat + } + if w == 0 { + continue + } + weight := b.config.FormatValue(w) + nodelets += fmt.Sprintf(`N%d_%d [label = "%s" fontsize=8 shape=box3d tooltip="%s"]`+"\n", nodeID, i, t.Name, weight) + nodelets += fmt.Sprintf(`N%d -> N%d_%d [label=" %s" weight=100 tooltip="%s" labeltooltip="%s"]`+"\n", nodeID, nodeID, i, weight, weight, weight) + if nts := lnts[t.Name]; nts != nil { + nodelets += b.numericNodelets(nts, maxNumNodelets, flatTags, fmt.Sprintf(`N%d_%d`, nodeID, i)) + } + } + + if nts := lnts[""]; nts != nil { + nodelets += b.numericNodelets(nts, maxNumNodelets, flatTags, fmt.Sprintf(`N%d`, nodeID)) + } + + fmt.Fprint(b, nodelets) + return nodelets != "" +} + +func (b *builder) numericNodelets(nts []*Tag, maxNumNodelets int, flatTags bool, source string) string { + nodelets := "" + + // Collapse numeric labels into maxNumNodelets buckets, of the form: + // 1MB..2MB, 3MB..5MB, ... + for j, t := range collapsedTags(nts, maxNumNodelets, flatTags) { + w, attr := t.Cum, ` style="dotted"` + if flatTags || t.Flat == t.Cum { + w, attr = t.Flat, "" + } + if w != 0 { + weight := b.config.FormatValue(w) + nodelets += fmt.Sprintf(`N%s_%d [label = "%s" fontsize=8 shape=box3d tooltip="%s"]`+"\n", source, j, t.Name, weight) + nodelets += fmt.Sprintf(`%s -> N%s_%d [label=" %s" weight=100 tooltip="%s" labeltooltip="%s"%s]`+"\n", source, source, j, weight, weight, weight, attr) + } + } + return nodelets +} + +// addEdge generates a graph edge in DOT format. +func (b *builder) addEdge(edge *Edge, from, to int, hasNodelets bool) { + var inline string + if edge.Inline { + inline = `\n (inline)` + } + w := b.config.FormatValue(edge.Weight) + attr := fmt.Sprintf(`label=" %s%s"`, w, inline) + if b.config.Total != 0 { + // Note: edge.weight > b.config.Total is possible for profile diffs. + if weight := 1 + int(min64(abs64(edge.Weight*100/b.config.Total), 100)); weight > 1 { + attr = fmt.Sprintf(`%s weight=%d`, attr, weight) + } + if width := 1 + int(min64(abs64(edge.Weight*5/b.config.Total), 5)); width > 1 { + attr = fmt.Sprintf(`%s penwidth=%d`, attr, width) + } + attr = fmt.Sprintf(`%s color="%s"`, attr, + dotColor(float64(edge.Weight)/float64(abs64(b.config.Total)), false)) + } + arrow := "->" + if edge.Residual { + arrow = "..." + } + tooltip := fmt.Sprintf(`"%s %s %s (%s)"`, + edge.Src.Info.PrintableName(), arrow, edge.Dest.Info.PrintableName(), w) + attr = fmt.Sprintf(`%s tooltip=%s labeltooltip=%s`, attr, tooltip, tooltip) + + if edge.Residual { + attr = attr + ` style="dotted"` + } + + if hasNodelets { + // Separate children further if source has tags. + attr = attr + " minlen=2" + } + + fmt.Fprintf(b, "N%d -> N%d [%s]\n", from, to, attr) +} + +// dotColor returns a color for the given score (between -1.0 and +// 1.0), with -1.0 colored red, 0.0 colored grey, and 1.0 colored +// green. If isBackground is true, then a light (low-saturation) +// color is returned (suitable for use as a background color); +// otherwise, a darker color is returned (suitable for use as a +// foreground color). +func dotColor(score float64, isBackground bool) string { + // A float between 0.0 and 1.0, indicating the extent to which + // colors should be shifted away from grey (to make positive and + // negative values easier to distinguish, and to make more use of + // the color range.) + const shift = 0.7 + + // Saturation and value (in hsv colorspace) for background colors. 
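// Example (annotation, not part of this patch): the test graphs use
// Total=100 and node Cum=25, so addNode asks for dotColor(0.25, ...). With
// the constants below that evaluates to "#b23c00" for the foreground color
// and "#edddd5" for the background fill, the values recorded in
// testdata/compose1.dot.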
+ const bgSaturation = 0.1 + const bgValue = 0.93 + + // Saturation and value (in hsv colorspace) for foreground colors. + const fgSaturation = 1.0 + const fgValue = 0.7 + + // Choose saturation and value based on isBackground. + var saturation float64 + var value float64 + if isBackground { + saturation = bgSaturation + value = bgValue + } else { + saturation = fgSaturation + value = fgValue + } + + // Limit the score values to the range [-1.0, 1.0]. + score = math.Max(-1.0, math.Min(1.0, score)) + + // Reduce saturation near score=0 (so it is colored grey, rather than yellow). + if math.Abs(score) < 0.2 { + saturation *= math.Abs(score) / 0.2 + } + + // Apply 'shift' to move scores away from 0.0 (grey). + if score > 0.0 { + score = math.Pow(score, (1.0 - shift)) + } + if score < 0.0 { + score = -math.Pow(-score, (1.0 - shift)) + } + + var r, g, b float64 // red, green, blue + if score < 0.0 { + g = value + r = value * (1 + saturation*score) + } else { + r = value + g = value * (1 - saturation*score) + } + b = value * (1 - saturation) + return fmt.Sprintf("#%02x%02x%02x", uint8(r*255.0), uint8(g*255.0), uint8(b*255.0)) +} + +// percentage computes the percentage of total of a value, and encodes +// it as a string. At least two digits of precision are printed. +func percentage(value, total int64) string { + var ratio float64 + if total != 0 { + ratio = math.Abs(float64(value)/float64(total)) * 100 + } + switch { + case math.Abs(ratio) >= 99.95 && math.Abs(ratio) <= 100.05: + return " 100%" + case math.Abs(ratio) >= 1.0: + return fmt.Sprintf("%5.2f%%", ratio) + default: + return fmt.Sprintf("%5.2g%%", ratio) + } +} + +func multilinePrintableName(info *NodeInfo) string { + infoCopy := *info + infoCopy.Name = strings.Replace(infoCopy.Name, "::", `\n`, -1) + infoCopy.Name = strings.Replace(infoCopy.Name, ".", `\n`, -1) + if infoCopy.File != "" { + infoCopy.File = filepath.Base(infoCopy.File) + } + return strings.Join(infoCopy.NameComponents(), `\n`) + `\n` +} + +// collapsedTags trims and sorts a slice of tags. +func collapsedTags(ts []*Tag, count int, flatTags bool) []*Tag { + ts = SortTags(ts, flatTags) + if len(ts) <= count { + return ts + } + + tagGroups := make([][]*Tag, count) + for i, t := range (ts)[:count] { + tagGroups[i] = []*Tag{t} + } + for _, t := range (ts)[count:] { + g, d := 0, tagDistance(t, tagGroups[0][0]) + for i := 1; i < count; i++ { + if nd := tagDistance(t, tagGroups[i][0]); nd < d { + g, d = i, nd + } + } + tagGroups[g] = append(tagGroups[g], t) + } + + var nts []*Tag + for _, g := range tagGroups { + l, w, c := tagGroupLabel(g) + nts = append(nts, &Tag{ + Name: l, + Flat: w, + Cum: c, + }) + } + return SortTags(nts, flatTags) +} + +func tagDistance(t, u *Tag) float64 { + v, _ := measurement.Scale(u.Value, u.Unit, t.Unit) + if v < float64(t.Value) { + return float64(t.Value) - v + } + return v - float64(t.Value) +} + +func tagGroupLabel(g []*Tag) (label string, flat, cum int64) { + if len(g) == 1 { + t := g[0] + return measurement.Label(t.Value, t.Unit), t.Flat, t.Cum + } + min := g[0] + max := g[0] + f := min.Flat + c := min.Cum + for _, t := range g[1:] { + if v, _ := measurement.Scale(t.Value, t.Unit, min.Unit); int64(v) < min.Value { + min = t + } + if v, _ := measurement.Scale(t.Value, t.Unit, max.Unit); int64(v) > max.Value { + max = t + } + f += t.Flat + c += t.Cum + } + return measurement.Label(min.Value, min.Unit) + ".." 
+ measurement.Label(max.Value, max.Unit), f, c +} + +func min64(a, b int64) int64 { + if a < b { + return a + } + return b +} diff --git a/src/internal/graph/dotgraph_test.go b/src/internal/graph/dotgraph_test.go new file mode 100644 index 00000000..c3cbc96b --- /dev/null +++ b/src/internal/graph/dotgraph_test.go @@ -0,0 +1,276 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package graph + +import ( + "bytes" + "fmt" + "io/ioutil" + "reflect" + "strconv" + "strings" + "testing" + + "internal/proftest" +) + +const path = "testdata/" + +func TestComposeWithStandardGraph(t *testing.T) { + g := baseGraph() + a, c := baseAttrsAndConfig() + + var buf bytes.Buffer + ComposeDot(&buf, g, a, c) + + want, err := ioutil.ReadFile(path + "compose1.dot") + if err != nil { + t.Fatalf("error reading test file: %v", err) + } + + compareGraphs(t, buf.Bytes(), want) +} + +func TestComposeWithNodeAttributesAndZeroFlat(t *testing.T) { + g := baseGraph() + a, c := baseAttrsAndConfig() + + // Set NodeAttributes for Node 1. + a.Nodes[g.Nodes[0]] = &DotNodeAttributes{ + Shape: "folder", + Bold: true, + Peripheries: 2, + URL: "www.google.com", + Formatter: func(ni *NodeInfo) string { + return strings.ToUpper(ni.Name) + }, + } + + // Set Flat value to zero on Node 2. + g.Nodes[1].Flat = 0 + + var buf bytes.Buffer + ComposeDot(&buf, g, a, c) + + want, err := ioutil.ReadFile(path + "compose2.dot") + if err != nil { + t.Fatalf("error reading test file: %v", err) + } + + compareGraphs(t, buf.Bytes(), want) +} + +func TestComposeWithTagsAndResidualEdge(t *testing.T) { + g := baseGraph() + a, c := baseAttrsAndConfig() + + // Add tags to Node 1. + g.Nodes[0].LabelTags["a"] = &Tag{ + Name: "tag1", + Cum: 10, + Flat: 10, + } + g.Nodes[0].NumericTags[""] = TagMap{ + "b": &Tag{ + Name: "tag2", + Cum: 20, + Flat: 20, + Unit: "ms", + }, + } + + // Set edge to be Residual. + g.Nodes[0].Out[g.Nodes[1]].Residual = true + + var buf bytes.Buffer + ComposeDot(&buf, g, a, c) + + want, err := ioutil.ReadFile(path + "compose3.dot") + if err != nil { + t.Fatalf("error reading test file: %v", err) + } + + compareGraphs(t, buf.Bytes(), want) +} + +func TestComposeWithNestedTags(t *testing.T) { + g := baseGraph() + a, c := baseAttrsAndConfig() + + // Add tags to Node 1. 
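// Annotation (not part of this patch): keying NumericTags with "tag1" nests
// the millisecond tag under the label tag of the same name, so in the
// expected output (testdata/compose5.dot) the numeric nodelet NN1_0_0 hangs
// off the tag nodelet N1_0 rather than directly off N1.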
+ g.Nodes[0].LabelTags["tag1"] = &Tag{ + Name: "tag1", + Cum: 10, + Flat: 10, + } + g.Nodes[0].NumericTags["tag1"] = TagMap{ + "tag2": &Tag{ + Name: "tag2", + Cum: 20, + Flat: 20, + Unit: "ms", + }, + } + + var buf bytes.Buffer + ComposeDot(&buf, g, a, c) + + want, err := ioutil.ReadFile(path + "compose5.dot") + if err != nil { + t.Fatalf("error reading test file: %v", err) + } + + compareGraphs(t, buf.Bytes(), want) +} + +func TestComposeWithEmptyGraph(t *testing.T) { + g := &Graph{} + a, c := baseAttrsAndConfig() + + var buf bytes.Buffer + ComposeDot(&buf, g, a, c) + + want, err := ioutil.ReadFile(path + "compose4.dot") + if err != nil { + t.Fatalf("error reading test file: %v", err) + } + + compareGraphs(t, buf.Bytes(), want) +} + +func baseGraph() *Graph { + src := &Node{ + Info: NodeInfo{Name: "src"}, + Flat: 10, + Cum: 25, + In: make(EdgeMap), + Out: make(EdgeMap), + LabelTags: make(TagMap), + NumericTags: make(map[string]TagMap), + } + dest := &Node{ + Info: NodeInfo{Name: "dest"}, + Flat: 15, + Cum: 25, + In: make(EdgeMap), + Out: make(EdgeMap), + LabelTags: make(TagMap), + NumericTags: make(map[string]TagMap), + } + edge := &Edge{ + Src: src, + Dest: dest, + Weight: 10, + } + src.Out[dest] = edge + src.In[src] = edge + return &Graph{ + Nodes: Nodes{ + src, + dest, + }, + } +} + +func baseAttrsAndConfig() (*DotAttributes, *DotConfig) { + a := &DotAttributes{ + Nodes: make(map[*Node]*DotNodeAttributes), + } + c := &DotConfig{ + Title: "testtitle", + Labels: []string{"label1", "label2"}, + Total: 100, + FormatValue: func(v int64) string { + return strconv.FormatInt(v, 10) + }, + } + return a, c +} + +func compareGraphs(t *testing.T, got, want []byte) { + if string(got) != string(want) { + d, err := proftest.Diff(got, want) + if err != nil { + t.Fatalf("error finding diff: %v", err) + } + t.Errorf("Compose incorrectly wrote %s", string(d)) + } +} + +func TestMultilinePrintableName(t *testing.T) { + ni := &NodeInfo{ + Name: "test1.test2::test3", + File: "src/file.cc", + Address: 123, + Lineno: 999, + } + + want := fmt.Sprintf(`%016x\ntest1\ntest2\ntest3\nfile.cc:999\n`, 123) + if got := multilinePrintableName(ni); got != want { + t.Errorf("multilinePrintableName(%#v) == %q, want %q", ni, got, want) + } +} + +func TestTagCollapse(t *testing.T) { + tagSource := []*Tag{ + {"12mb", "mb", 12, 100, 100}, + {"1kb", "kb", 1, 1, 1}, + {"1mb", "mb", 1, 1000, 1000}, + {"2048mb", "mb", 2048, 1000, 1000}, + {"1b", "b", 1, 100, 100}, + {"2b", "b", 2, 100, 100}, + {"7b", "b", 7, 100, 100}, + } + + tagWant := [][]*Tag{ + []*Tag{ + {"1B..2GB", "", 0, 2401, 2401}, + }, + []*Tag{ + {"2GB", "", 0, 1000, 1000}, + {"1B..12MB", "", 0, 1401, 1401}, + }, + []*Tag{ + {"2GB", "", 0, 1000, 1000}, + {"12MB", "", 0, 100, 100}, + {"1B..1MB", "", 0, 1301, 1301}, + }, + []*Tag{ + {"2GB", "", 0, 1000, 1000}, + {"1MB", "", 0, 1000, 1000}, + {"2B..1kB", "", 0, 201, 201}, + {"1B", "", 0, 100, 100}, + {"12MB", "", 0, 100, 100}, + }, + } + + for _, tc := range tagWant { + var got, want []*Tag + got = collapsedTags(tagSource, len(tc), true) + want = SortTags(tc, true) + + if !reflect.DeepEqual(got, want) { + t.Errorf("collapse to %d, got:\n%v\nwant:\n%v", len(tc), tagString(got), tagString(want)) + } + } +} + +func tagString(t []*Tag) string { + var ret []string + for _, s := range t { + ret = append(ret, fmt.Sprintln(s)) + } + return strings.Join(ret, ":") +} diff --git a/src/internal/graph/graph.go b/src/internal/graph/graph.go new file mode 100644 index 00000000..4d19dcf1 --- /dev/null +++ b/src/internal/graph/graph.go @@ 
-0,0 +1,859 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package graph collects a set of samples into a directed graph. +package graph + +import ( + "fmt" + "math" + "path/filepath" + "sort" + "strings" + + "profile" +) + +// Graph summarizes a performance profile into a format that is +// suitable for visualization. +type Graph struct { + Nodes Nodes +} + +// Options encodes the options for constructing a graph +type Options struct { + SampleValue func(s []int64) int64 // Function to compute the value of a sample + FormatTag func(int64, string) string // Function to format a sample tag value into a string + ObjNames bool // Always preserve obj filename + + CallTree bool // Build a tree instead of a graph + DropNegative bool // Drop nodes with overall negative values + + KeptNodes NodeSet // If non-nil, only use nodes in this set +} + +// Nodes is an ordered collection of graph nodes. +type Nodes []*Node + +// Node is an entry on a profiling report. It represents a unique +// program location. +type Node struct { + // Information associated to this entry. + Info NodeInfo + + // values associated to this node. + // Flat is exclusive to this node, cum includes all descendents. + Flat, Cum int64 + + // in and out contains the nodes immediately reaching or reached by this nodes. + In, Out EdgeMap + + // tags provide additional information about subsets of a sample. + LabelTags TagMap + + // Numeric tags provide additional values for subsets of a sample. + // Numeric tags are optionally associated to a label tag. The key + // for NumericTags is the name of the LabelTag they are associated + // to, or "" for numeric tags not associated to a label tag. + NumericTags map[string]TagMap +} + +// BumpWeight increases the weight of an edge between two nodes. If +// there isn't such an edge one is created. +func (n *Node) BumpWeight(to *Node, w int64, residual, inline bool) { + if n.Out[to] != to.In[n] { + panic(fmt.Errorf("asymmetric edges %v %v", *n, *to)) + } + + if e := n.Out[to]; e != nil { + e.Weight += w + if residual { + e.Residual = true + } + if !inline { + e.Inline = false + } + return + } + + info := &Edge{Src: n, Dest: to, Weight: w, Residual: residual, Inline: inline} + n.Out[to] = info + to.In[n] = info +} + +// NodeInfo contains the attributes for a node. +type NodeInfo struct { + Name string + OrigName string + Address uint64 + File string + StartLine, Lineno int + Objfile string +} + +// PrintableName calls the Node's Formatter function with a single space separator. +func (i *NodeInfo) PrintableName() string { + return strings.Join(i.NameComponents(), " ") +} + +// NameComponents returns the components of the printable name to be used for a node. 
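// For example (annotation, hypothetical values): a node with Address 0x1000,
// Name "foo", File "bar.go" and Lineno 10 yields
// ["0000000000001000", "foo", "bar.go:10"].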
+func (i *NodeInfo) NameComponents() []string { + var name []string + if i.Address != 0 { + name = append(name, fmt.Sprintf("%016x", i.Address)) + } + if fun := i.Name; fun != "" { + name = append(name, fun) + } + + switch { + case i.Lineno != 0: + // User requested line numbers, provide what we have. + name = append(name, fmt.Sprintf("%s:%d", i.File, i.Lineno)) + case i.File != "": + // User requested file name, provide it. + name = append(name, i.File) + case i.Name != "": + // User requested function name. It was already included. + case i.Objfile != "": + // Only binary name is available + name = append(name, "["+i.Objfile+"]") + default: + // Do not leave it empty if there is no information at all. + name = append(name, "") + } + return name +} + +// ExtendedNodeInfo extends the NodeInfo with a pointer to a parent node, to +// identify nodes with identical information and different callers. This is +// used when creating call trees. +type ExtendedNodeInfo struct { + NodeInfo + parent *Node +} + +// NodeMap maps from a node info struct to a node. It is used to merge +// report entries with the same info. +type NodeMap map[ExtendedNodeInfo]*Node + +// NodeSet maps is a collection of node info structs. +type NodeSet map[NodeInfo]bool + +// FindOrInsertNode takes the info for a node and either returns a matching node +// from the node map if one exists, or adds one to the map if one does not. +// If parent is non-nil, return a match with the same parent. +// If kept is non-nil, nodes are only added if they can be located on it. +func (m NodeMap) FindOrInsertNode(info NodeInfo, parent *Node, kept NodeSet) *Node { + if kept != nil && !kept[info] { + return nil + } + + extendedInfo := ExtendedNodeInfo{ + info, + parent, + } + + if n := m[extendedInfo]; n != nil { + return n + } + + n := &Node{ + Info: info, + In: make(EdgeMap), + Out: make(EdgeMap), + LabelTags: make(TagMap), + NumericTags: make(map[string]TagMap), + } + m[extendedInfo] = n + return n +} + +// EdgeMap is used to represent the incoming/outgoing edges from a node. +type EdgeMap map[*Node]*Edge + +// Edge contains any attributes to be represented about edges in a graph. +type Edge struct { + Src, Dest *Node + // The summary weight of the edge + Weight int64 + // residual edges connect nodes that were connected through a + // separate node, which has been removed from the report. + Residual bool + // An inline edge represents a call that was inlined into the caller. + Inline bool +} + +// Tag represent sample annotations +type Tag struct { + Name string + Unit string // Describe the value, "" for non-numeric tags + Value int64 + Flat int64 + Cum int64 +} + +// TagMap is a collection of tags, classified by their name. +type TagMap map[string]*Tag + +// SortTags sorts a slice of tags based on their weight. +func SortTags(t []*Tag, flat bool) []*Tag { + ts := tags{t, flat} + sort.Sort(ts) + return ts.t +} + +// New summarizes performance data from a profile into a graph. +func New(prof *profile.Profile, o *Options) (g *Graph) { + locations := NewLocInfo(prof, o.ObjNames) + nm := make(NodeMap) + for _, sample := range prof.Sample { + if sample.Location == nil { + continue + } + + // Construct list of node names for sample. + // Keep track of the index on the Sample for each frame, + // to determine inlining status. + + var stack []NodeInfo + var locIndex []int + for i, loc := range sample.Location { + id := loc.ID + stack = append(stack, locations[id]...) 
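// Annotation (not part of this patch): a Location with several Line entries
// (inlined call frames) expands into several NodeInfos here, and the loop
// below records the same sample-frame index for each of them; that index is
// later compared when calling BumpWeight to decide whether an edge should be
// marked as inlined.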
+ for _ = range locations[id] { + locIndex = append(locIndex, i) + } + } + + weight := o.SampleValue(sample.Value) + seenEdge := make(map[*Node]map[*Node]bool) + var nn *Node + nlocIndex := -1 + residual := false + // Walk top-down over the frames in a sample, keeping track + // of the current parent if we're building a tree. + for i := len(stack); i > 0; i-- { + var parent *Node + if o.CallTree { + parent = nn + } + n := nm.FindOrInsertNode(stack[i-1], parent, o.KeptNodes) + if n == nil { + residual = true + continue + } + // Add flat weight to leaf node. + if i == 1 { + n.addSample(sample, weight, o.FormatTag, true) + } + // Add cum weight to all nodes in stack, avoiding double counting. + if seenEdge[n] == nil { + seenEdge[n] = make(map[*Node]bool) + n.addSample(sample, weight, o.FormatTag, false) + } + // Update edge weights for all edges in stack, avoiding double counting. + if nn != nil && n != nn && !seenEdge[n][nn] { + seenEdge[n][nn] = true + // This is an inlined edge if the caller and the callee + // correspond to the same entry in the sample. + nn.BumpWeight(n, weight, residual, locIndex[i-1] == nlocIndex) + } + nn = n + nlocIndex = locIndex[i-1] + residual = false + } + } + + // Collect nodes into a graph. + ns := make(Nodes, 0, len(nm)) + for _, n := range nm { + if o.DropNegative && isNegative(n) { + continue + } + ns = append(ns, n) + } + + return &Graph{ns} +} + +// isNegative returns true if the node is considered as "negative" for the +// purposes of drop_negative. +func isNegative(n *Node) bool { + switch { + case n.Flat < 0: + return true + case n.Flat == 0 && n.Cum < 0: + return true + default: + return false + } +} + +// NewLocInfo creates a slice of formatted names for a location. +func NewLocInfo(prof *profile.Profile, keepBinary bool) map[uint64][]NodeInfo { + locations := make(map[uint64][]NodeInfo) + + for _, l := range prof.Location { + var objfile string + + if m := l.Mapping; m != nil { + objfile = filepath.Base(m.File) + } + + if len(l.Line) == 0 { + locations[l.ID] = []NodeInfo{ + { + Address: l.Address, + Objfile: objfile, + }, + } + continue + } + var info []NodeInfo + for _, line := range l.Line { + ni := NodeInfo{ + Address: l.Address, + Lineno: int(line.Line), + } + + if line.Function != nil { + ni.Name = line.Function.Name + ni.OrigName = line.Function.SystemName + ni.File = line.Function.Filename + ni.StartLine = int(line.Function.StartLine) + } + if keepBinary || line.Function == nil { + ni.Objfile = objfile + } + info = append(info, ni) + } + locations[l.ID] = info + } + return locations +} + +type tags struct { + t []*Tag + flat bool +} + +func (t tags) Len() int { return len(t.t) } +func (t tags) Swap(i, j int) { t.t[i], t.t[j] = t.t[j], t.t[i] } +func (t tags) Less(i, j int) bool { + if !t.flat { + if t.t[i].Cum != t.t[j].Cum { + return abs64(t.t[i].Cum) > abs64(t.t[j].Cum) + } + } + if t.t[i].Flat != t.t[j].Flat { + return abs64(t.t[i].Flat) > abs64(t.t[j].Flat) + } + return t.t[i].Name < t.t[j].Name +} + +// Sum adds the Flat and sum values on a report. 
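// Annotation (not part of this patch): for the two-node test graph in
// dotgraph_test.go (flat values 10 and 15, cum values 25 and 25), Sum
// returns flat=25, cum=50.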
+func (ns Nodes) Sum() (flat int64, cum int64) { + for _, n := range ns { + flat += n.Flat + cum += n.Cum + } + return +} + +func (n *Node) addSample(s *profile.Sample, value int64, format func(int64, string) string, flat bool) { + // Update sample value + if flat { + n.Flat += value + } else { + n.Cum += value + } + + // Add string tags + var labels []string + for key, vals := range s.Label { + for _, v := range vals { + labels = append(labels, key+":"+v) + } + } + var joinedLabels string + if len(labels) > 0 { + sort.Strings(labels) + joinedLabels = strings.Join(labels, `\n`) + t := n.LabelTags.findOrAddTag(joinedLabels, "", 0) + if flat { + t.Flat += value + } else { + t.Cum += value + } + } + + numericTags := n.NumericTags[joinedLabels] + if numericTags == nil { + numericTags = TagMap{} + n.NumericTags[joinedLabels] = numericTags + } + // Add numeric tags + for key, nvals := range s.NumLabel { + for _, v := range nvals { + var label string + if format != nil { + label = format(v, key) + } else { + label = fmt.Sprintf("%d", v) + } + t := numericTags.findOrAddTag(label, key, v) + if flat { + t.Flat += value + } else { + t.Cum += value + } + } + } +} + +func (m TagMap) findOrAddTag(label, unit string, value int64) *Tag { + l := m[label] + if l == nil { + l = &Tag{ + Name: label, + Unit: unit, + Value: value, + } + m[label] = l + } + return l +} + +// String returns a text representation of a graph, for debugging purposes. +func (g *Graph) String() string { + var s []string + + nodeIndex := make(map[*Node]int, len(g.Nodes)) + + for i, n := range g.Nodes { + nodeIndex[n] = i + 1 + } + + for i, n := range g.Nodes { + name := n.Info.PrintableName() + var in, out []int + + for _, from := range n.In { + in = append(in, nodeIndex[from.Src]) + } + for _, to := range n.Out { + out = append(out, nodeIndex[to.Dest]) + } + s = append(s, fmt.Sprintf("%d: %s[flat=%d cum=%d] %x -> %v ", i+1, name, n.Flat, n.Cum, in, out)) + } + return strings.Join(s, "\n") +} + +// DiscardLowFrequencyNodes returns a set of the nodes at or over a +// specific cum value cutoff. +func (g *Graph) DiscardLowFrequencyNodes(nodeCutoff int64) NodeSet { + return makeNodeSet(g.Nodes, nodeCutoff) +} + +func makeNodeSet(nodes Nodes, nodeCutoff int64) NodeSet { + kept := make(NodeSet, len(nodes)) + for _, n := range nodes { + if abs64(n.Cum) < nodeCutoff { + continue + } + kept[n.Info] = true + } + return kept +} + +// TrimLowFrequencyTags removes tags that have less than +// the specified weight. +func (g *Graph) TrimLowFrequencyTags(tagCutoff int64) { + // Remove nodes with value <= total*nodeFraction + for _, n := range g.Nodes { + n.LabelTags = trimLowFreqTags(n.LabelTags, tagCutoff) + for s, nt := range n.NumericTags { + n.NumericTags[s] = trimLowFreqTags(nt, tagCutoff) + } + } +} + +func trimLowFreqTags(tags TagMap, minValue int64) TagMap { + kept := TagMap{} + for s, t := range tags { + if abs64(t.Flat) >= minValue || abs64(t.Cum) >= minValue { + kept[s] = t + } + } + return kept +} + +// TrimLowFrequencyEdges removes edges that have less than +// the specified weight. Returns the number of edges removed +func (g *Graph) TrimLowFrequencyEdges(edgeCutoff int64) int { + var droppedEdges int + for _, n := range g.Nodes { + for src, e := range n.In { + if abs64(e.Weight) < edgeCutoff { + delete(n.In, src) + delete(src.Out, n) + droppedEdges++ + } + } + } + return droppedEdges +} + +// SortNodes sorts the nodes in a graph based on a specific heuristic. 
+func (g *Graph) SortNodes(cum bool, visualMode bool) { + // Sort nodes based on requested mode + switch { + case visualMode: + // Specialized sort to produce a more visually-interesting graph + g.Nodes.Sort(EntropyOrder) + case cum: + g.Nodes.Sort(CumNameOrder) + default: + g.Nodes.Sort(FlatNameOrder) + } +} + +// SelectTopNodes returns a set of the top maxNodes nodes in a graph. +func (g *Graph) SelectTopNodes(maxNodes int, visualMode bool) NodeSet { + if maxNodes > 0 { + if visualMode { + var count int + // If generating a visual graph, count tags as nodes. Update + // maxNodes to account for them. + for i, n := range g.Nodes { + if count += countTags(n) + 1; count >= maxNodes { + maxNodes = i + 1 + break + } + } + } + } + if maxNodes > len(g.Nodes) { + maxNodes = len(g.Nodes) + } + return makeNodeSet(g.Nodes[:maxNodes], 0) +} + +// countTags counts the tags with flat count. This underestimates the +// number of tags being displayed, but in practice is close enough. +func countTags(n *Node) int { + count := 0 + for _, e := range n.LabelTags { + if e.Flat != 0 { + count++ + } + } + for _, t := range n.NumericTags { + for _, e := range t { + if e.Flat != 0 { + count++ + } + } + } + return count +} + +// countEdges counts the number of edges below the specified cutoff. +func countEdges(el EdgeMap, cutoff int64) int { + count := 0 + for _, e := range el { + if e.Weight > cutoff { + count++ + } + } + return count +} + +// RemoveRedundantEdges removes residual edges if the destination can +// be reached through another path. This is done to simplify the graph +// while preserving connectivity. +func (g *Graph) RemoveRedundantEdges() { + // Walk the nodes and outgoing edges in reverse order to prefer + // removing edges with the lowest weight. + for i := len(g.Nodes); i > 0; i-- { + n := g.Nodes[i-1] + in := n.In.Sort() + for j := len(in); j > 0; j-- { + e := in[j-1] + if !e.Residual { + // Do not remove edges heavier than a non-residual edge, to + // avoid potential confusion. + break + } + if isRedundant(e) { + delete(e.Src.Out, e.Dest) + delete(e.Dest.In, e.Src) + } + } + } +} + +// isRedundant determines if an edge can be removed without impacting +// connectivity of the whole graph. This is implemented by checking if the +// nodes have a common ancestor after removing the edge. +func isRedundant(e *Edge) bool { + destPred := predecessors(e, e.Dest) + if len(destPred) == 1 { + return false + } + srcPred := predecessors(e, e.Src) + + for n := range srcPred { + if destPred[n] && n != e.Dest { + return true + } + } + return false +} + +// predecessors collects all the predecessors to node n, excluding edge e. +func predecessors(e *Edge, n *Node) map[*Node]bool { + seen := map[*Node]bool{n: true} + queue := []*Node{n} + for len(queue) > 0 { + n := queue[0] + queue = queue[1:] + for _, ie := range n.In { + if e == ie || seen[ie.Src] { + continue + } + seen[ie.Src] = true + queue = append(queue, ie.Src) + } + } + return seen +} + +// nodeSorter is a mechanism used to allow a report to be sorted +// in different ways. +type nodeSorter struct { + rs Nodes + less func(l, r *Node) bool +} + +func (s nodeSorter) Len() int { return len(s.rs) } +func (s nodeSorter) Swap(i, j int) { s.rs[i], s.rs[j] = s.rs[j], s.rs[i] } +func (s nodeSorter) Less(i, j int) bool { return s.less(s.rs[i], s.rs[j]) } + +// Sort reorders a slice of nodes based on the specified ordering +// criteria. 
The result is sorted in decreasing order for (absolute) +// numeric quantities, alphabetically for text, and increasing for +// addresses. +func (ns Nodes) Sort(o NodeOrder) error { + var s nodeSorter + + switch o { + case FlatNameOrder: + s = nodeSorter{ns, + func(l, r *Node) bool { + if iv, jv := l.Flat, r.Flat; iv != jv { + return abs64(iv) > abs64(jv) + } + if l.Info.PrintableName() != r.Info.PrintableName() { + return l.Info.PrintableName() < r.Info.PrintableName() + } + iv, jv := l.Cum, r.Cum + return abs64(iv) > abs64(jv) + }, + } + case FlatCumNameOrder: + s = nodeSorter{ns, + func(l, r *Node) bool { + if iv, jv := l.Flat, r.Flat; iv != jv { + return abs64(iv) > abs64(jv) + } + if iv, jv := l.Cum, r.Cum; iv != jv { + return abs64(iv) > abs64(jv) + } + return l.Info.PrintableName() < r.Info.PrintableName() + }, + } + case NameOrder: + s = nodeSorter{ns, + func(l, r *Node) bool { + return l.Info.Name < r.Info.Name + }, + } + case FileOrder: + s = nodeSorter{ns, + func(l, r *Node) bool { + return l.Info.File < r.Info.File + }, + } + case AddressOrder: + s = nodeSorter{ns, + func(l, r *Node) bool { + return l.Info.Address < r.Info.Address + }, + } + case CumNameOrder, EntropyOrder: + // Hold scoring for score-based ordering + var score map[*Node]int64 + scoreOrder := func(l, r *Node) bool { + if is, js := score[l], score[r]; is != js { + return abs64(is) > abs64(js) + } + if l.Info.PrintableName() != r.Info.PrintableName() { + return l.Info.PrintableName() < r.Info.PrintableName() + } + return abs64(l.Flat) > abs64(r.Flat) + } + + switch o { + case CumNameOrder: + score = make(map[*Node]int64, len(ns)) + for _, n := range ns { + score[n] = n.Cum + } + s = nodeSorter{ns, scoreOrder} + case EntropyOrder: + score = make(map[*Node]int64, len(ns)) + for _, n := range ns { + score[n] = entropyScore(n) + } + s = nodeSorter{ns, scoreOrder} + } + default: + return fmt.Errorf("report: unrecognized sort ordering: %d", o) + } + sort.Sort(s) + return nil +} + +// entropyScore computes a score for a node representing how important +// it is to include this node on a graph visualization. It is used to +// sort the nodes and select which ones to display if we have more +// nodes than desired in the graph. This number is computed by looking +// at the flat and cum weights of the node and the incoming/outgoing +// edges. The fundamental idea is to penalize nodes that have a simple +// fallthrough from their incoming to the outgoing edge. +func entropyScore(n *Node) int64 { + score := float64(0) + + if len(n.In) == 0 { + score++ // Favor entry nodes + } else { + score += edgeEntropyScore(n, n.In, 0) + } + + if len(n.Out) == 0 { + score++ // Favor leaf nodes + } else { + score += edgeEntropyScore(n, n.Out, n.Flat) + } + + return int64(score*float64(n.Cum)) + n.Flat +} + +// edgeEntropyScore computes the entropy value for a set of edges +// coming in or out of a node. Entropy (as defined in information +// theory) refers to the amount of information encoded by the set of +// edges. A set of edges that have a more interesting distribution of +// samples gets a higher score. 
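// Worked example (annotation, not part of this patch): two equally weighted
// edges yield -2*(0.5*log2(0.5)) = 1.0, while a single edge yields 0, so a
// node that merely passes its weight from one caller to one callee scores
// low and tends to be dropped from the visualization first.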
+func edgeEntropyScore(n *Node, edges EdgeMap, self int64) float64 { + score := float64(0) + total := self + for _, e := range edges { + if e.Weight > 0 { + total += abs64(e.Weight) + } + } + if total != 0 { + for _, e := range edges { + frac := float64(abs64(e.Weight)) / float64(total) + score += -frac * math.Log2(frac) + } + if self > 0 { + frac := float64(abs64(self)) / float64(total) + score += -frac * math.Log2(frac) + } + } + return score +} + +// NodeOrder sets the ordering for a Sort operation +type NodeOrder int + +// Sorting options for node sort. +const ( + FlatNameOrder NodeOrder = iota + FlatCumNameOrder + CumNameOrder + NameOrder + FileOrder + AddressOrder + EntropyOrder +) + +// Sort returns a slice of the edges in the map, in a consistent +// order. The sort order is first based on the edge weight +// (higher-to-lower) and then by the node names to avoid flakiness. +func (e EdgeMap) Sort() []*Edge { + el := make(edgeList, 0, len(e)) + for _, w := range e { + el = append(el, w) + } + + sort.Sort(el) + return el +} + +// Sum returns the total weight for a set of nodes. +func (e EdgeMap) Sum() int64 { + var ret int64 + for _, edge := range e { + ret += edge.Weight + } + return ret +} + +type edgeList []*Edge + +func (el edgeList) Len() int { + return len(el) +} + +func (el edgeList) Less(i, j int) bool { + if el[i].Weight != el[j].Weight { + return abs64(el[i].Weight) > abs64(el[j].Weight) + } + + from1 := el[i].Src.Info.PrintableName() + from2 := el[j].Src.Info.PrintableName() + if from1 != from2 { + return from1 < from2 + } + + to1 := el[i].Dest.Info.PrintableName() + to2 := el[j].Dest.Info.PrintableName() + + return to1 < to2 +} + +func (el edgeList) Swap(i, j int) { + el[i], el[j] = el[j], el[i] +} + +func abs64(i int64) int64 { + if i < 0 { + return -i + } + return i +} diff --git a/src/internal/graph/testdata/compose1.dot b/src/internal/graph/testdata/compose1.dot new file mode 100644 index 00000000..ceed0253 --- /dev/null +++ b/src/internal/graph/testdata/compose1.dot @@ -0,0 +1,7 @@ +digraph "testtitle" { +node [style=filled fillcolor="#f8f8f8"] +subgraph cluster_L { "label1" [shape=box fontsize=16 label="label1\llabel2\l"] } +N1 [label="src\n10 (10.00%)\nof 25 (25.00%)" fontsize=22 shape=box tooltip="src (25)" color="#b23c00" fillcolor="#edddd5"] +N2 [label="dest\n15 (15.00%)\nof 25 (25.00%)" fontsize=24 shape=box tooltip="dest (25)" color="#b23c00" fillcolor="#edddd5"] +N1 -> N2 [label=" 10" weight=11 color="#b28559" tooltip="src -> dest (10)" labeltooltip="src -> dest (10)"] +} diff --git a/src/internal/graph/testdata/compose2.dot b/src/internal/graph/testdata/compose2.dot new file mode 100644 index 00000000..ee951fe3 --- /dev/null +++ b/src/internal/graph/testdata/compose2.dot @@ -0,0 +1,7 @@ +digraph "testtitle" { +node [style=filled fillcolor="#f8f8f8"] +subgraph cluster_L { "label1" [shape=box fontsize=16 label="label1\llabel2\l"] } +N1 [label="SRC10 (10.00%)\nof 25 (25.00%)" fontsize=24 shape=folder tooltip="src (25)" color="#b23c00" fillcolor="#edddd5" style="bold,filled" peripheries=2 URL="www.google.com" target="_blank"] +N2 [label="dest\n0 of 25 (25.00%)" fontsize=8 shape=box tooltip="dest (25)" color="#b23c00" fillcolor="#edddd5"] +N1 -> N2 [label=" 10" weight=11 color="#b28559" tooltip="src -> dest (10)" labeltooltip="src -> dest (10)"] +} diff --git a/src/internal/graph/testdata/compose3.dot b/src/internal/graph/testdata/compose3.dot new file mode 100644 index 00000000..99a3119b --- /dev/null +++ b/src/internal/graph/testdata/compose3.dot @@ -0,0 +1,11 
@@ +digraph "testtitle" { +node [style=filled fillcolor="#f8f8f8"] +subgraph cluster_L { "label1" [shape=box fontsize=16 label="label1\llabel2\l"] } +N1 [label="src\n10 (10.00%)\nof 25 (25.00%)" fontsize=22 shape=box tooltip="src (25)" color="#b23c00" fillcolor="#edddd5"] +N1_0 [label = "tag1" fontsize=8 shape=box3d tooltip="10"] +N1 -> N1_0 [label=" 10" weight=100 tooltip="10" labeltooltip="10"] +NN1_0 [label = "tag2" fontsize=8 shape=box3d tooltip="20"] +N1 -> NN1_0 [label=" 20" weight=100 tooltip="20" labeltooltip="20"] +N2 [label="dest\n15 (15.00%)\nof 25 (25.00%)" fontsize=24 shape=box tooltip="dest (25)" color="#b23c00" fillcolor="#edddd5"] +N1 -> N2 [label=" 10" weight=11 color="#b28559" tooltip="src ... dest (10)" labeltooltip="src ... dest (10)" style="dotted" minlen=2] +} diff --git a/src/internal/graph/testdata/compose4.dot b/src/internal/graph/testdata/compose4.dot new file mode 100644 index 00000000..adc9cc6f --- /dev/null +++ b/src/internal/graph/testdata/compose4.dot @@ -0,0 +1,4 @@ +digraph "testtitle" { +node [style=filled fillcolor="#f8f8f8"] +subgraph cluster_L { "label1" [shape=box fontsize=16 label="label1\llabel2\l"] } +} diff --git a/src/internal/graph/testdata/compose5.dot b/src/internal/graph/testdata/compose5.dot new file mode 100644 index 00000000..352975f5 --- /dev/null +++ b/src/internal/graph/testdata/compose5.dot @@ -0,0 +1,11 @@ +digraph "testtitle" { +node [style=filled fillcolor="#f8f8f8"] +subgraph cluster_L { "label1" [shape=box fontsize=16 label="label1\llabel2\l"] } +N1 [label="src\n10 (10.00%)\nof 25 (25.00%)" fontsize=22 shape=box tooltip="src (25)" color="#b23c00" fillcolor="#edddd5"] +N1_0 [label = "tag1" fontsize=8 shape=box3d tooltip="10"] +N1 -> N1_0 [label=" 10" weight=100 tooltip="10" labeltooltip="10"] +NN1_0_0 [label = "tag2" fontsize=8 shape=box3d tooltip="20"] +N1_0 -> NN1_0_0 [label=" 20" weight=100 tooltip="20" labeltooltip="20"] +N2 [label="dest\n15 (15.00%)\nof 25 (25.00%)" fontsize=24 shape=box tooltip="dest (25)" color="#b23c00" fillcolor="#edddd5"] +N1 -> N2 [label=" 10" weight=11 color="#b28559" tooltip="src -> dest (10)" labeltooltip="src -> dest (10)" minlen=2] +} diff --git a/src/internal/measurement/measurement.go b/src/internal/measurement/measurement.go new file mode 100644 index 00000000..77ed9712 --- /dev/null +++ b/src/internal/measurement/measurement.go @@ -0,0 +1,299 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package measurement export utility functions to manipulate/format performance profile sample values. +package measurement + +import ( + "fmt" + "strings" + "time" + + "profile" +) + +// ScaleProfiles updates the units in a set of profiles to make them +// compatible. It scales the profiles to the smallest unit to preserve +// data. 
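// For instance (annotation, not part of this patch): merging a profile whose
// samples are in milliseconds with one in microseconds rescales both to
// microseconds, the finer of the two units, so no precision is lost in the
// conversion.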
+func ScaleProfiles(profiles []*profile.Profile) error { + if len(profiles) == 0 { + return nil + } + periodTypes := make([]*profile.ValueType, 0, len(profiles)) + for _, p := range profiles { + if p.PeriodType != nil { + periodTypes = append(periodTypes, p.PeriodType) + } + } + periodType, err := CommonValueType(periodTypes) + if err != nil { + return fmt.Errorf("period type: %v", err) + } + + // Identify common sample types + numSampleTypes := len(profiles[0].SampleType) + for _, p := range profiles[1:] { + if numSampleTypes != len(p.SampleType) { + return fmt.Errorf("inconsistent samples type count: %d != %d", numSampleTypes, len(p.SampleType)) + } + } + sampleType := make([]*profile.ValueType, numSampleTypes) + for i := 0; i < numSampleTypes; i++ { + sampleTypes := make([]*profile.ValueType, len(profiles)) + for j, p := range profiles { + sampleTypes[j] = p.SampleType[i] + } + sampleType[i], err = CommonValueType(sampleTypes) + if err != nil { + return fmt.Errorf("sample types: %v", err) + } + } + + for _, p := range profiles { + if p.PeriodType != nil && periodType != nil { + period, _ := Scale(p.Period, p.PeriodType.Unit, periodType.Unit) + p.Period, p.PeriodType.Unit = int64(period), periodType.Unit + } + ratios := make([]float64, len(p.SampleType)) + for i, st := range p.SampleType { + if sampleType[i] == nil { + ratios[i] = 1 + continue + } + ratios[i], _ = Scale(1, st.Unit, sampleType[i].Unit) + p.SampleType[i].Unit = sampleType[i].Unit + } + if err := p.ScaleN(ratios); err != nil { + return fmt.Errorf("scale: %v", err) + } + } + return nil +} + +// CommonValueType returns the finest type from a set of compatible +// types. +func CommonValueType(ts []*profile.ValueType) (*profile.ValueType, error) { + if len(ts) <= 1 { + return nil, nil + } + minType := ts[0] + for _, t := range ts[1:] { + if !compatibleValueTypes(minType, t) { + return nil, fmt.Errorf("incompatible types: %v %v", *minType, *t) + } + if ratio, _ := Scale(1, t.Unit, minType.Unit); ratio < 1 { + minType = t + } + } + rcopy := *minType + return &rcopy, nil +} + +func compatibleValueTypes(v1, v2 *profile.ValueType) bool { + if v1 == nil || v2 == nil { + return true // No grounds to disqualify. + } + // Remove trailing 's' to permit minor mismatches. + if t1, t2 := strings.TrimSuffix(v1.Type, "s"), strings.TrimSuffix(v2.Type, "s"); t1 != t2 { + return false + } + + return v1.Unit == v2.Unit || + (isTimeUnit(v1.Unit) && isTimeUnit(v2.Unit)) || + (isMemoryUnit(v1.Unit) && isMemoryUnit(v2.Unit)) +} + +// Scale a measurement from an unit to a different unit and returns +// the scaled value and the target unit. The returned target unit +// will be empty if uninteresting (could be skipped). +func Scale(value int64, fromUnit, toUnit string) (float64, string) { + // Avoid infinite recursion on overflow. + if value < 0 && -value > 0 { + v, u := Scale(-value, fromUnit, toUnit) + return -v, u + } + if m, u, ok := memoryLabel(value, fromUnit, toUnit); ok { + return m, u + } + if t, u, ok := timeLabel(value, fromUnit, toUnit); ok { + return t, u + } + // Skip non-interesting units. + switch toUnit { + case "count", "sample", "unit", "minimum", "auto": + return float64(value), "" + default: + return float64(value), toUnit + } +} + +// Label returns the label used to describe a certain measurement. +func Label(value int64, unit string) string { + return ScaledLabel(value, unit, "auto") +} + +// ScaledLabel scales the passed-in measurement (if necessary) and +// returns the label used to describe a float measurement. 
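// For example (annotation, not part of this patch): Label(2048, "mb")
// renders as "2GB" and Label(12, "mb") as "12MB"; these are the collapsed
// tag names expected by TestTagCollapse in dotgraph_test.go.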
+func ScaledLabel(value int64, fromUnit, toUnit string) string { + v, u := Scale(value, fromUnit, toUnit) + sv := strings.TrimSuffix(fmt.Sprintf("%.2f", v), ".00") + if sv == "0" || sv == "-0" { + return "0" + } + return sv + u +} + +// isMemoryUnit returns whether a name is recognized as a memory size +// unit. +func isMemoryUnit(unit string) bool { + switch strings.TrimSuffix(strings.ToLower(unit), "s") { + case "byte", "b", "kilobyte", "kb", "megabyte", "mb", "gigabyte", "gb": + return true + } + return false +} + +func memoryLabel(value int64, fromUnit, toUnit string) (v float64, u string, ok bool) { + fromUnit = strings.TrimSuffix(strings.ToLower(fromUnit), "s") + toUnit = strings.TrimSuffix(strings.ToLower(toUnit), "s") + + switch fromUnit { + case "byte", "b": + case "kilobyte", "kb": + value *= 1024 + case "megabyte", "mb": + value *= 1024 * 1024 + case "gigabyte", "gb": + value *= 1024 * 1024 * 1024 + default: + return 0, "", false + } + + if toUnit == "minimum" || toUnit == "auto" { + switch { + case value < 1024: + toUnit = "b" + case value < 1024*1024: + toUnit = "kb" + case value < 1024*1024*1024: + toUnit = "mb" + default: + toUnit = "gb" + } + } + + var output float64 + switch toUnit { + default: + output, toUnit = float64(value), "B" + case "kb", "kbyte", "kilobyte": + output, toUnit = float64(value)/1024, "kB" + case "mb", "mbyte", "megabyte": + output, toUnit = float64(value)/(1024*1024), "MB" + case "gb", "gbyte", "gigabyte": + output, toUnit = float64(value)/(1024*1024*1024), "GB" + } + return output, toUnit, true +} + +// isTimeUnit returns whether a name is recognized as a time unit. +func isTimeUnit(unit string) bool { + unit = strings.ToLower(unit) + if len(unit) > 2 { + unit = strings.TrimSuffix(unit, "s") + } + + switch unit { + case "nanosecond", "ns", "microsecond", "millisecond", "ms", "s", "second", "sec", "hr", "day", "week", "year": + return true + } + return false +} + +func timeLabel(value int64, fromUnit, toUnit string) (v float64, u string, ok bool) { + fromUnit = strings.ToLower(fromUnit) + if len(fromUnit) > 2 { + fromUnit = strings.TrimSuffix(fromUnit, "s") + } + + toUnit = strings.ToLower(toUnit) + if len(toUnit) > 2 { + toUnit = strings.TrimSuffix(toUnit, "s") + } + + var d time.Duration + switch fromUnit { + case "nanosecond", "ns": + d = time.Duration(value) * time.Nanosecond + case "microsecond": + d = time.Duration(value) * time.Microsecond + case "millisecond", "ms": + d = time.Duration(value) * time.Millisecond + case "second", "sec", "s": + d = time.Duration(value) * time.Second + case "cycle": + return float64(value), "", true + default: + return 0, "", false + } + + if toUnit == "minimum" || toUnit == "auto" { + switch { + case d < 1*time.Microsecond: + toUnit = "ns" + case d < 1*time.Millisecond: + toUnit = "us" + case d < 1*time.Second: + toUnit = "ms" + case d < 1*time.Minute: + toUnit = "sec" + case d < 1*time.Hour: + toUnit = "min" + case d < 24*time.Hour: + toUnit = "hour" + case d < 15*24*time.Hour: + toUnit = "day" + case d < 120*24*time.Hour: + toUnit = "week" + default: + toUnit = "year" + } + } + + var output float64 + dd := float64(d) + switch toUnit { + case "ns", "nanosecond": + output, toUnit = dd/float64(time.Nanosecond), "ns" + case "us", "microsecond": + output, toUnit = dd/float64(time.Microsecond), "us" + case "ms", "millisecond": + output, toUnit = dd/float64(time.Millisecond), "ms" + case "min", "minute": + output, toUnit = dd/float64(time.Minute), "mins" + case "hour", "hr": + output, toUnit = dd/float64(time.Hour), 
"hrs" + case "day": + output, toUnit = dd/float64(24*time.Hour), "days" + case "week", "wk": + output, toUnit = dd/float64(7*24*time.Hour), "wks" + case "year", "yr": + output, toUnit = dd/float64(365*7*24*time.Hour), "yrs" + default: + fallthrough + case "sec", "second", "s": + output, toUnit = dd/float64(time.Second), "s" + } + return output, toUnit, true +} diff --git a/src/internal/plugin/plugin.go b/src/internal/plugin/plugin.go new file mode 100644 index 00000000..96b1904e --- /dev/null +++ b/src/internal/plugin/plugin.go @@ -0,0 +1,186 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package plugin defines the plugin implementations that the main pprof driver requires. +package plugin + +import ( + "io" + "regexp" + "time" + + "profile" +) + +// Options groups all the optional plugins into pprof. +type Options struct { + Writer Writer + Flagset FlagSet + Fetch Fetcher + Sym Symbolizer + Obj ObjTool + UI UI +} + +// Writer provides a mechanism to write data under a certain name, +// typically a filename. +type Writer interface { + Open(name string) (io.WriteCloser, error) +} + +// A FlagSet creates and parses command-line flags. +// It is similar to the standard flag.FlagSet. +type FlagSet interface { + // Bool, Int, Float64, and String define new flags, + // like the functions of the same name in package flag. + Bool(name string, def bool, usage string) *bool + Int(name string, def int, usage string) *int + Float64(name string, def float64, usage string) *float64 + String(name string, def string, usage string) *string + + // BoolVar, IntVar, Float64Var, and StringVar define new flags referencing + // a given pointer, like the functions of the same name in package flag. + BoolVar(pointer *bool, name string, def bool, usage string) + IntVar(pointer *int, name string, def int, usage string) + Float64Var(pointer *float64, name string, def float64, usage string) + StringVar(pointer *string, name string, def string, usage string) + + // StringList is similar to String but allows multiple values for a + // single flag + StringList(name string, def string, usage string) *[]*string + + // ExtraUsage returns any additional text that should be + // printed after the standard usage message. + // The typical use of ExtraUsage is to show any custom flags + // defined by the specific pprof plugins being used. + ExtraUsage() string + + // Parse initializes the flags with their values for this run + // and returns the non-flag command line arguments. + // If an unknown flag is encountered or there are no arguments, + // Parse should call usage and return nil. + Parse(usage func()) []string +} + +// A Fetcher reads and returns the profile named by src. src can be a +// local file path or a URL. duration and timeout are units specified +// by the end user, or 0 by default. duration refers to the length of +// the profile collection, if applicable, and timeout is the amount of +// time to wait for a profile before returning an error. 
Returns the +// fetched profile, the URL of the actual source of the profile, or an +// error. +type Fetcher interface { + Fetch(src string, duration, timeout time.Duration) (*profile.Profile, string, error) +} + +// A Symbolizer introduces symbol information into a profile. +type Symbolizer interface { + Symbolize(mode string, srcs MappingSources, prof *profile.Profile) error +} + +// MappingSources map each profile.Mapping to the source of the profile. +// The key is either Mapping.File or Mapping.BuildId. +type MappingSources map[string][]struct { + Source string // URL of the source the mapping was collected from + Start uint64 // delta applied to addresses from this source (to represent Merge adjustments) +} + +// An ObjTool inspects shared libraries and executable files. +type ObjTool interface { + // Open opens the named object file. If the object is a shared + // library, start/limit/offset are the addresses where it is mapped + // into memory in the address space being inspected. + Open(file string, start, limit, offset uint64) (ObjFile, error) + + // Disasm disassembles the named object file, starting at + // the start address and stopping at (before) the end address. + Disasm(file string, start, end uint64) ([]Inst, error) +} + +// An Inst is a single instruction in an assembly listing. +type Inst struct { + Addr uint64 // virtual address of instruction + Text string // instruction text + File string // source file + Line int // source line +} + +// An ObjFile is a single object file: a shared library or executable. +type ObjFile interface { + // Name returns the underlyinf file name, if available + Name() string + + // Base returns the base address to use when looking up symbols in the file. + Base() uint64 + + // BuildID returns the GNU build ID of the file, or an empty string. + BuildID() string + + // SourceLine reports the source line information for a given + // address in the file. Due to inlining, the source line information + // is in general a list of positions representing a call stack, + // with the leaf function first. + SourceLine(addr uint64) ([]Frame, error) + + // Symbols returns a list of symbols in the object file. + // If r is not nil, Symbols restricts the list to symbols + // with names matching the regular expression. + // If addr is not zero, Symbols restricts the list to symbols + // containing that address. + Symbols(r *regexp.Regexp, addr uint64) ([]*Sym, error) + + // Close closes the file, releasing associated resources. + Close() error +} + +// A Frame describes a single line in a source file. +type Frame struct { + Func string // name of function + File string // source file name + Line int // line in file +} + +// A Sym describes a single symbol in an object file. +type Sym struct { + Name []string // names of symbol (many if symbol was dedup'ed) + File string // object file containing symbol + Start uint64 // start virtual address + End uint64 // virtual address of last byte in sym (Start+size-1) +} + +// A UI manages user interactions. +type UI interface { + // Read returns a line of text (a command) read from the user. + // prompt is printed before reading the command. + ReadLine(prompt string) (string, error) + + // Print shows a message to the user. + // It formats the text as fmt.Print would and adds a final \n if not already present. + // For line-based UI, Print writes to standard error. + // (Standard output is reserved for report data.) + Print(...interface{}) + + // PrintErr shows an error message to the user. 
+ // It formats the text as fmt.Print would and adds a final \n if not already present. + // For line-based UI, PrintErr writes to standard error. + PrintErr(...interface{}) + + // IsTerminal returns whether the UI is known to be tied to an + // interactive terminal (as opposed to being redirected to a file). + IsTerminal() bool + + // SetAutoComplete instructs the UI to call complete(cmd) to obtain + // the auto-completion of cmd, if the UI supports auto-completion at all. + SetAutoComplete(complete func(string) string) +} diff --git a/src/internal/proftest/proftest.go b/src/internal/proftest/proftest.go new file mode 100644 index 00000000..fd834a13 --- /dev/null +++ b/src/internal/proftest/proftest.go @@ -0,0 +1,102 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package proftest provides some utility routines to test other +// packages related to profiles. +package proftest + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "os" + "os/exec" + "testing" +) + +// Diff compares two byte arrays using the diff tool to highlight the +// differences. It is meant for testing purposes to display the +// differences between expected and actual output. +func Diff(b1, b2 []byte) (data []byte, err error) { + f1, err := ioutil.TempFile("", "proto_test") + if err != nil { + return nil, err + } + defer os.Remove(f1.Name()) + defer f1.Close() + + f2, err := ioutil.TempFile("", "proto_test") + if err != nil { + return nil, err + } + defer os.Remove(f2.Name()) + defer f2.Close() + + f1.Write(b1) + f2.Write(b2) + + data, err = exec.Command("diff", "-u", f1.Name(), f2.Name()).CombinedOutput() + if len(data) > 0 { + // diff exits with a non-zero status when the files don't match. + // Ignore that failure as long as we get output. + err = nil + } + return +} + +// EncodeJSON encodes a value into a byte array. This is intended for +// testing purposes. +func EncodeJSON(x interface{}) []byte { + data, err := json.MarshalIndent(x, "", " ") + if err != nil { + panic(err) + } + data = append(data, '\n') + return data +} + +// TestUI implements the plugin.UI interface, triggering test failures +// if more than Ignore errors are printed. +type TestUI struct { + T *testing.T + Ignore int +} + +// ReadLine returns no input, as no input is expected during testing. +func (ui *TestUI) ReadLine(_ string) (string, error) { + return "", fmt.Errorf("no input") +} + +// Print messages are discarded by the test UI. +func (ui *TestUI) Print(args ...interface{}) { +} + +// PrintErr messages may trigger an error failure. A fixed number of +// error messages are permitted when appropriate. +func (ui *TestUI) PrintErr(args ...interface{}) { + if ui.Ignore > 0 { + ui.Ignore-- + return + } + ui.T.Error(args) +} + +// IsTerminal indicates if the UI is an interactive terminal. +func (ui *TestUI) IsTerminal() bool { + return false +} + +// SetAutoComplete is not supported by the test UI. 
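As a usage sketch only (not part of this change, and assuming the testing and internal/proftest imports), a test elsewhere in the tree could hand TestUI to code that expects a plugin.UI, using Ignore to tolerate a known number of error prints:

	func TestQuietUI(t *testing.T) {
		ui := &proftest.TestUI{T: t, Ignore: 1}
		ui.Print("informational output is discarded")
		ui.PrintErr("one expected warning is tolerated")
		// A second PrintErr here would be reported through t.Error.
	}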
+func (ui *TestUI) SetAutoComplete(_ func(string) string) { +} diff --git a/src/internal/report/report.go b/src/internal/report/report.go new file mode 100644 index 00000000..4e04863c --- /dev/null +++ b/src/internal/report/report.go @@ -0,0 +1,947 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package report summarizes a performance profile into a +// human-readable report. +package report + +import ( + "fmt" + "io" + "math" + "os" + "path/filepath" + "regexp" + "sort" + "strconv" + "strings" + "time" + + "internal/graph" + "internal/measurement" + "internal/plugin" + "profile" +) + +// Generate generates a report as directed by the Report. +func Generate(w io.Writer, rpt *Report, obj plugin.ObjTool) error { + o := rpt.options + + switch o.OutputFormat { + case Dot: + return printDOT(w, rpt) + case Tree: + return printTree(w, rpt) + case Text: + return printText(w, rpt) + case Traces: + return printTraces(w, rpt) + case Raw: + fmt.Fprint(w, rpt.prof.String()) + return nil + case Tags: + return printTags(w, rpt) + case Proto: + return rpt.prof.Write(w) + case TopProto: + return printTopProto(w, rpt) + case Dis: + return printAssembly(w, rpt, obj) + case List: + return printSource(w, rpt) + case WebList: + return printWebSource(w, rpt, obj) + case Callgrind: + return printCallgrind(w, rpt) + } + return fmt.Errorf("unexpected output format") +} + +// newTrimmedGraph creates a graph for this report, trimmed according +// to the report options. +func (rpt *Report) newTrimmedGraph() (g *graph.Graph, origCount, droppedNodes, droppedEdges int) { + o := rpt.options + + // Build a graph and refine it. On each refinement step we must rebuild the graph from the samples, + // as the graph itself doesn't contain enough information to preserve full precision. + + // First step: Build complete graph to identify low frequency nodes, based on their cum weight. + g = rpt.newGraph(nil) + totalValue, _ := g.Nodes.Sum() + nodeCutoff := abs64(int64(float64(totalValue) * o.NodeFraction)) + edgeCutoff := abs64(int64(float64(totalValue) * o.EdgeFraction)) + + // Filter out nodes with cum value below nodeCutoff. + if nodeCutoff > 0 { + if nodesKept := g.DiscardLowFrequencyNodes(nodeCutoff); len(g.Nodes) != len(nodesKept) { + droppedNodes = len(g.Nodes) - len(nodesKept) + g = rpt.newGraph(nodesKept) + } + } + origCount = len(g.Nodes) + + // Second step: Limit the total number of nodes. Apply specialized heuristics to improve + // visualization when generating dot output. + visualMode := o.OutputFormat == Dot + g.SortNodes(o.CumSort, visualMode) + if nodeCount := o.NodeCount; nodeCount > 0 { + // Remove low frequency tags and edges as they affect selection. 
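+		// For example, with NodeFraction=0.005 and EdgeFraction=0.001 (the
+		// values pprof commonly defaults to) and a total value of 11111,
+		// nodeCutoff is int64(11111*0.005) = 55 and edgeCutoff is
+		// int64(11111*0.001) = 11, so anything below those weights is dropped
+		// before the top-nodeCount selection.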
+ g.TrimLowFrequencyTags(nodeCutoff) + g.TrimLowFrequencyEdges(edgeCutoff) + if nodesKept := g.SelectTopNodes(nodeCount, visualMode); len(nodesKept) != len(g.Nodes) { + g = rpt.newGraph(nodesKept) + g.SortNodes(o.CumSort, visualMode) + } + } + + // Final step: Filter out low frequency tags and edges, and remove redundant edges that clutter + // the graph. + g.TrimLowFrequencyTags(nodeCutoff) + droppedEdges = g.TrimLowFrequencyEdges(edgeCutoff) + if visualMode { + g.RemoveRedundantEdges() + } + return +} + +func (rpt *Report) selectOutputUnit(g *graph.Graph) { + o := rpt.options + + // Select best unit for profile output. + // Find the appropriate units for the smallest non-zero sample + if o.OutputUnit != "minimum" || len(g.Nodes) == 0 { + return + } + var minValue int64 + + for _, n := range g.Nodes { + nodeMin := abs64(n.Flat) + if nodeMin == 0 { + nodeMin = abs64(n.Cum) + } + if nodeMin > 0 && (minValue == 0 || nodeMin < minValue) { + minValue = nodeMin + } + } + maxValue := rpt.total + if minValue == 0 { + minValue = maxValue + } + + if r := o.Ratio; r > 0 && r != 1 { + minValue = int64(float64(minValue) * r) + maxValue = int64(float64(maxValue) * r) + } + + _, minUnit := measurement.Scale(minValue, o.SampleUnit, "minimum") + _, maxUnit := measurement.Scale(maxValue, o.SampleUnit, "minimum") + + unit := minUnit + if minUnit != maxUnit && minValue*100 < maxValue && o.OutputFormat != Callgrind { + // Minimum and maximum values have different units. Scale + // minimum by 100 to use larger units, allowing minimum value to + // be scaled down to 0.01, except for callgrind reports since + // they can only represent integer values. + _, unit = measurement.Scale(100*minValue, o.SampleUnit, "minimum") + } + + if unit != "" { + o.OutputUnit = unit + } else { + o.OutputUnit = o.SampleUnit + } +} + +// newGraph creates a new graph for this report. If nodes is non-nil, +// only nodes whose info matches are included. Otherwise, all nodes +// are included, without trimming. +func (rpt *Report) newGraph(nodes graph.NodeSet) *graph.Graph { + o := rpt.options + + // Clean up file paths using heuristics. + prof := rpt.prof + for _, f := range prof.Function { + f.Filename = trimPath(f.Filename) + } + + gopt := &graph.Options{ + SampleValue: o.SampleValue, + FormatTag: formatTag, + CallTree: o.CallTree && o.OutputFormat == Dot, + DropNegative: o.DropNegative, + KeptNodes: nodes, + } + + // Only keep binary names for disassembly-based reports, otherwise + // remove it to allow merging of functions across binaries. 
+ switch o.OutputFormat { + case Raw, List, WebList, Dis: + gopt.ObjNames = true + } + + return graph.New(rpt.prof, gopt) +} + +func formatTag(v int64, key string) string { + return measurement.Label(v, key) +} + +func printTopProto(w io.Writer, rpt *Report) error { + p := rpt.prof + o := rpt.options + g, _, _, _ := rpt.newTrimmedGraph() + rpt.selectOutputUnit(g) + + out := profile.Profile{ + SampleType: []*profile.ValueType{ + {Type: "cum", Unit: o.OutputUnit}, + {Type: "flat", Unit: o.OutputUnit}, + }, + TimeNanos: p.TimeNanos, + DurationNanos: p.DurationNanos, + PeriodType: p.PeriodType, + Period: p.Period, + } + var flatSum int64 + for i, n := range g.Nodes { + name, flat, cum := n.Info.PrintableName(), n.Flat, n.Cum + + flatSum += flat + f := &profile.Function{ + ID: uint64(i + 1), + Name: name, + SystemName: name, + } + l := &profile.Location{ + ID: uint64(i + 1), + Line: []profile.Line{ + { + Function: f, + }, + }, + } + + fv, _ := measurement.Scale(flat, o.SampleUnit, o.OutputUnit) + cv, _ := measurement.Scale(cum, o.SampleUnit, o.OutputUnit) + s := &profile.Sample{ + Location: []*profile.Location{l}, + Value: []int64{int64(cv), int64(fv)}, + } + out.Function = append(out.Function, f) + out.Location = append(out.Location, l) + out.Sample = append(out.Sample, s) + } + + return out.Write(w) +} + +// printAssembly prints an annotated assembly listing. +func printAssembly(w io.Writer, rpt *Report, obj plugin.ObjTool) error { + o := rpt.options + prof := rpt.prof + + g := rpt.newGraph(nil) + + // If the regexp source can be parsed as an address, also match + // functions that land on that address. + var address *uint64 + if hex, err := strconv.ParseUint(o.Symbol.String(), 0, 64); err == nil { + address = &hex + } + + fmt.Fprintln(w, "Total:", rpt.formatValue(rpt.total)) + symbols := symbolsFromBinaries(prof, g, o.Symbol, address, obj) + symNodes := nodesPerSymbol(g.Nodes, symbols) + // Sort function names for printing. + var syms objSymbols + for s := range symNodes { + syms = append(syms, s) + } + sort.Sort(syms) + + // Correlate the symbols from the binary with the profile samples. + for _, s := range syms { + sns := symNodes[s] + + // Gather samples for this symbol. + flatSum, cumSum := sns.Sum() + + // Get the function assembly. + insns, err := obj.Disasm(s.sym.File, s.sym.Start, s.sym.End) + if err != nil { + return err + } + + ns := annotateAssembly(insns, sns, s.base) + + fmt.Fprintf(w, "ROUTINE ======================== %s\n", s.sym.Name[0]) + for _, name := range s.sym.Name[1:] { + fmt.Fprintf(w, " AKA ======================== %s\n", name) + } + fmt.Fprintf(w, "%10s %10s (flat, cum) %s of Total\n", + rpt.formatValue(flatSum), rpt.formatValue(cumSum), + percentage(cumSum, rpt.total)) + + for _, n := range ns { + fmt.Fprintf(w, "%10s %10s %10x: %s\n", valueOrDot(n.Flat, rpt), valueOrDot(n.Cum, rpt), n.Info.Address, n.Info.Name) + } + } + return nil +} + +// symbolsFromBinaries examines the binaries listed on the profile +// that have associated samples, and identifies symbols matching rx. +func symbolsFromBinaries(prof *profile.Profile, g *graph.Graph, rx *regexp.Regexp, address *uint64, obj plugin.ObjTool) []*objSymbol { + hasSamples := make(map[string]bool) + // Only examine mappings that have samples that match the + // regexp. This is an optimization to speed up pprof. 
+ for _, n := range g.Nodes { + if name := n.Info.PrintableName(); rx.MatchString(name) && n.Info.Objfile != "" { + hasSamples[n.Info.Objfile] = true + } + } + + // Walk all mappings looking for matching functions with samples. + var objSyms []*objSymbol + for _, m := range prof.Mapping { + if !hasSamples[filepath.Base(m.File)] { + if address == nil || !(m.Start <= *address && *address <= m.Limit) { + continue + } + } + + f, err := obj.Open(m.File, m.Start, m.Limit, m.Offset) + if err != nil { + fmt.Printf("%v\n", err) + continue + } + + // Find symbols in this binary matching the user regexp. + var addr uint64 + if address != nil { + addr = *address + } + msyms, err := f.Symbols(rx, addr) + base := f.Base() + f.Close() + if err != nil { + continue + } + for _, ms := range msyms { + objSyms = append(objSyms, + &objSymbol{ + sym: ms, + base: base, + }, + ) + } + } + + return objSyms +} + +// objSym represents a symbol identified from a binary. It includes +// the SymbolInfo from the disasm package and the base that must be +// added to correspond to sample addresses +type objSymbol struct { + sym *plugin.Sym + base uint64 +} + +// objSymbols is a wrapper type to enable sorting of []*objSymbol. +type objSymbols []*objSymbol + +func (o objSymbols) Len() int { + return len(o) +} + +func (o objSymbols) Less(i, j int) bool { + if namei, namej := o[i].sym.Name[0], o[j].sym.Name[0]; namei != namej { + return namei < namej + } + return o[i].sym.Start < o[j].sym.Start +} + +func (o objSymbols) Swap(i, j int) { + o[i], o[j] = o[j], o[i] +} + +// nodesPerSymbol classifies nodes into a group of symbols. +func nodesPerSymbol(ns graph.Nodes, symbols []*objSymbol) map[*objSymbol]graph.Nodes { + symNodes := make(map[*objSymbol]graph.Nodes) + for _, s := range symbols { + // Gather samples for this symbol. + for _, n := range ns { + address := n.Info.Address - s.base + if address >= s.sym.Start && address < s.sym.End { + symNodes[s] = append(symNodes[s], n) + } + } + } + return symNodes +} + +// annotateAssembly annotates a set of assembly instructions with a +// set of samples. It returns a set of nodes to display. base is an +// offset to adjust the sample addresses. +func annotateAssembly(insns []plugin.Inst, samples graph.Nodes, base uint64) graph.Nodes { + // Add end marker to simplify printing loop. + insns = append(insns, plugin.Inst{^uint64(0), "", "", 0}) + + // Ensure samples are sorted by address. + samples.Sort(graph.AddressOrder) + + var s int + var asm graph.Nodes + for ix, in := range insns[:len(insns)-1] { + n := graph.Node{ + Info: graph.NodeInfo{ + Address: in.Addr, + Name: in.Text, + File: trimPath(in.File), + Lineno: in.Line, + }, + } + + // Sum all the samples until the next instruction (to account + // for samples attributed to the middle of an instruction). + for next := insns[ix+1].Addr; s < len(samples) && samples[s].Info.Address-base < next; s++ { + n.Flat += samples[s].Flat + n.Cum += samples[s].Cum + if samples[s].Info.File != "" { + n.Info.File = trimPath(samples[s].Info.File) + n.Info.Lineno = samples[s].Info.Lineno + } + } + asm = append(asm, &n) + } + + return asm +} + +// valueOrDot formats a value according to a report, intercepting zero +// values. +func valueOrDot(value int64, rpt *Report) string { + if value == 0 { + return "." + } + return rpt.formatValue(value) +} + +// canAccessFile determines if the filename can be opened for reading. 
+func canAccessFile(path string) bool { + if fi, err := os.Stat(path); err == nil { + return fi.Mode().Perm()&0400 != 0 + } + return false +} + +// printTags collects all tags referenced in the profile and prints +// them in a sorted table. +func printTags(w io.Writer, rpt *Report) error { + p := rpt.prof + + // Hashtable to keep accumulate tags as key,value,count. + tagMap := make(map[string]map[string]int64) + for _, s := range p.Sample { + for key, vals := range s.Label { + for _, val := range vals { + if valueMap, ok := tagMap[key]; ok { + valueMap[val] = valueMap[val] + s.Value[0] + continue + } + valueMap := make(map[string]int64) + valueMap[val] = s.Value[0] + tagMap[key] = valueMap + } + } + for key, vals := range s.NumLabel { + for _, nval := range vals { + val := measurement.Label(nval, key) + if valueMap, ok := tagMap[key]; ok { + valueMap[val] = valueMap[val] + s.Value[0] + continue + } + valueMap := make(map[string]int64) + valueMap[val] = s.Value[0] + tagMap[key] = valueMap + } + } + } + + tagKeys := make([]*graph.Tag, 0, len(tagMap)) + for key := range tagMap { + tagKeys = append(tagKeys, &graph.Tag{Name: key}) + } + for _, tagKey := range graph.SortTags(tagKeys, true) { + var total int64 + key := tagKey.Name + tags := make([]*graph.Tag, 0, len(tagMap[key])) + for t, c := range tagMap[key] { + total += c + tags = append(tags, &graph.Tag{Name: t, Flat: c}) + } + + fmt.Fprintf(w, "%s: Total %d\n", key, total) + for _, t := range graph.SortTags(tags, true) { + if total > 0 { + fmt.Fprintf(w, " %8d (%s): %s\n", t.Flat, + percentage(t.Flat, total), t.Name) + } else { + fmt.Fprintf(w, " %8d: %s\n", t.Flat, t.Name) + } + } + fmt.Fprintln(w) + } + return nil +} + +// printText prints a flat text report for a profile. +func printText(w io.Writer, rpt *Report) error { + g, origCount, droppedNodes, _ := rpt.newTrimmedGraph() + rpt.selectOutputUnit(g) + + fmt.Fprintln(w, strings.Join(reportLabels(rpt, g, origCount, droppedNodes, 0, false), "\n")) + + fmt.Fprintf(w, "%10s %5s%% %5s%% %10s %5s%%\n", + "flat", "flat", "sum", "cum", "cum") + + var flatSum int64 + for _, n := range g.Nodes { + name, flat, cum := n.Info.PrintableName(), n.Flat, n.Cum + + var inline, noinline bool + for _, e := range n.In { + if e.Inline { + inline = true + } else { + noinline = true + } + } + + if inline { + if noinline { + name = name + " (partial-inline)" + } else { + name = name + " (inline)" + } + } + + flatSum += flat + fmt.Fprintf(w, "%10s %s %s %10s %s %s\n", + rpt.formatValue(flat), + percentage(flat, rpt.total), + percentage(flatSum, rpt.total), + rpt.formatValue(cum), + percentage(cum, rpt.total), + name) + } + return nil +} + +// printTraces prints all traces from a profile. +func printTraces(w io.Writer, rpt *Report) error { + fmt.Fprintln(w, strings.Join(ProfileLabels(rpt), "\n")) + + prof := rpt.prof + o := rpt.options + + const separator = "-----------+-------------------------------------------------------" + + locations := graph.NewLocInfo(prof, false) + + for _, sample := range prof.Sample { + var stack []graph.NodeInfo + for _, loc := range sample.Location { + id := loc.ID + stack = append(stack, locations[id]...) + } + + if len(stack) == 0 { + continue + } + + fmt.Fprintln(w, separator) + // Print any text labels for the sample. + var labels []string + for s, vs := range sample.Label { + labels = append(labels, fmt.Sprintf("%10s: %s\n", s, strings.Join(vs, " "))) + } + sort.Strings(labels) + fmt.Fprint(w, strings.Join(labels, "")) + // Print call stack. 
+ fmt.Fprintf(w, "%10s %s\n", + rpt.formatValue(o.SampleValue(sample.Value)), + stack[0].PrintableName()) + + for _, s := range stack[1:] { + fmt.Fprintf(w, "%10s %s\n", "", s.PrintableName()) + } + } + fmt.Fprintln(w, separator) + return nil +} + +// printCallgrind prints a graph for a profile on callgrind format. +func printCallgrind(w io.Writer, rpt *Report) error { + o := rpt.options + rpt.options.NodeFraction = 0 + rpt.options.EdgeFraction = 0 + rpt.options.NodeCount = 0 + + g, _, _, _ := rpt.newTrimmedGraph() + rpt.selectOutputUnit(g) + + fmt.Fprintln(w, "events:", o.SampleType+"("+o.OutputUnit+")") + + files := make(map[string]int) + names := make(map[string]int) + for _, n := range g.Nodes { + fmt.Fprintln(w, "fl="+callgrindName(files, n.Info.File)) + fmt.Fprintln(w, "fn="+callgrindName(names, n.Info.Name)) + sv, _ := measurement.Scale(n.Flat, o.SampleUnit, o.OutputUnit) + fmt.Fprintf(w, "%d %d\n", n.Info.Lineno, int64(sv)) + + // Print outgoing edges. + for _, out := range n.Out.Sort() { + c, _ := measurement.Scale(out.Weight, o.SampleUnit, o.OutputUnit) + callee := out.Dest + fmt.Fprintln(w, "cfl="+callgrindName(files, callee.Info.File)) + fmt.Fprintln(w, "cfn="+callgrindName(names, callee.Info.Name)) + // pprof doesn't have a flat weight for a call, leave as 0. + fmt.Fprintln(w, "calls=0", callee.Info.Lineno) + fmt.Fprintln(w, n.Info.Lineno, int64(c)) + } + fmt.Fprintln(w) + } + + return nil +} + +// callgrindName implements the callgrind naming compression scheme. +// For names not previously seen returns "(N) name", where N is a +// unique index. For names previously seen returns "(N)" where N is +// the index returned the first time. +func callgrindName(names map[string]int, name string) string { + if name == "" { + return "" + } + if id, ok := names[name]; ok { + return fmt.Sprintf("(%d)", id) + } + id := len(names) + 1 + names[name] = id + return fmt.Sprintf("(%d) %s", id, name) +} + +// printTree prints a tree-based report in text form. +func printTree(w io.Writer, rpt *Report) error { + const separator = "----------------------------------------------------------+-------------" + const legend = " flat flat% sum% cum cum% calls calls% + context " + + g, origCount, droppedNodes, _ := rpt.newTrimmedGraph() + rpt.selectOutputUnit(g) + + fmt.Fprintln(w, strings.Join(reportLabels(rpt, g, origCount, droppedNodes, 0, false), "\n")) + + fmt.Fprintln(w, separator) + fmt.Fprintln(w, legend) + var flatSum int64 + + rx := rpt.options.Symbol + for _, n := range g.Nodes { + name, flat, cum := n.Info.PrintableName(), n.Flat, n.Cum + + // Skip any entries that do not match the regexp (for the "peek" command). + if rx != nil && !rx.MatchString(name) { + continue + } + + fmt.Fprintln(w, separator) + // Print incoming edges. + inEdges := n.In.Sort() + for _, in := range inEdges { + var inline string + if in.Inline { + inline = " (inline)" + } + fmt.Fprintf(w, "%50s %s | %s%s\n", rpt.formatValue(in.Weight), + percentage(in.Weight, cum), in.Src.Info.PrintableName(), inline) + } + + // Print current node. + flatSum += flat + fmt.Fprintf(w, "%10s %s %s %10s %s | %s\n", + rpt.formatValue(flat), + percentage(flat, rpt.total), + percentage(flatSum, rpt.total), + rpt.formatValue(cum), + percentage(cum, rpt.total), + name) + + // Print outgoing edges. 
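+		// Each edge line below renders as "<weight> <percent of cum> | <node name>",
+		// with " (inline)" appended when the call site was inlined; for example,
+		// an edge of weight 11000 under a cum of 11111 prints its share as 99.00%.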
+ outEdges := n.Out.Sort() + for _, out := range outEdges { + var inline string + if out.Inline { + inline = " (inline)" + } + fmt.Fprintf(w, "%50s %s | %s%s\n", rpt.formatValue(out.Weight), + percentage(out.Weight, cum), out.Dest.Info.PrintableName(), inline) + } + } + if len(g.Nodes) > 0 { + fmt.Fprintln(w, separator) + } + return nil +} + +// printDOT prints an annotated callgraph in DOT format. +func printDOT(w io.Writer, rpt *Report) error { + g, origCount, droppedNodes, droppedEdges := rpt.newTrimmedGraph() + rpt.selectOutputUnit(g) + labels := reportLabels(rpt, g, origCount, droppedNodes, droppedEdges, true) + + c := &graph.DotConfig{ + Title: rpt.options.Title, + Labels: labels, + FormatValue: rpt.formatValue, + Total: rpt.total, + } + graph.ComposeDot(w, g, &graph.DotAttributes{}, c) + return nil +} + +// percentage computes the percentage of total of a value, and encodes +// it as a string. At least two digits of precision are printed. +func percentage(value, total int64) string { + var ratio float64 + if total != 0 { + ratio = math.Abs(float64(value)/float64(total)) * 100 + } + switch { + case math.Abs(ratio) >= 99.95 && math.Abs(ratio) <= 100.05: + return " 100%" + case math.Abs(ratio) >= 1.0: + return fmt.Sprintf("%5.2f%%", ratio) + default: + return fmt.Sprintf("%5.2g%%", ratio) + } +} + +// ProfileLabels returns printable labels for a profile. +func ProfileLabels(rpt *Report) []string { + label := []string{} + prof := rpt.prof + o := rpt.options + if len(prof.Mapping) > 0 { + if prof.Mapping[0].File != "" { + label = append(label, "File: "+filepath.Base(prof.Mapping[0].File)) + } + if prof.Mapping[0].BuildID != "" { + label = append(label, "Build ID: "+prof.Mapping[0].BuildID) + } + } + label = append(label, prof.Comments...) + if o.SampleType != "" { + label = append(label, "Type: "+o.SampleType) + } + if prof.TimeNanos != 0 { + const layout = "Jan 2, 2006 at 3:04pm (MST)" + label = append(label, "Time: "+time.Unix(0, prof.TimeNanos).Format(layout)) + } + if prof.DurationNanos != 0 { + duration := measurement.Label(prof.DurationNanos, "nanoseconds") + totalNanos, totalUnit := measurement.Scale(rpt.total, o.SampleUnit, "nanoseconds") + var ratio string + if totalUnit == "ns" && totalNanos != 0 { + ratio = "(" + percentage(int64(totalNanos), prof.DurationNanos) + ")" + } + label = append(label, fmt.Sprintf("Duration: %s, Total samples = %s %s", duration, rpt.formatValue(rpt.total), ratio)) + } + return label +} + +// reportLabels returns printable labels for a report. Includes +// profileLabels. 
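For reference, the percentage helper above keeps at least two digits of precision; with the totals used in the testdata it would produce (call sites illustrative, not from this change):

	percentage(11111, 11111) // " 100%"
	percentage(1000, 11111)  // " 9.00%"
	percentage(100, 11111)   // "  0.9%"
	percentage(10, 11111)    // " 0.09%"
	percentage(1, 11111)     // "0.009%"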
+func reportLabels(rpt *Report, g *graph.Graph, origCount, droppedNodes, droppedEdges int, fullHeaders bool) []string { + nodeFraction := rpt.options.NodeFraction + edgeFraction := rpt.options.EdgeFraction + nodeCount := rpt.options.NodeCount + + var label []string + if len(rpt.options.ProfileLabels) > 0 { + for _, l := range rpt.options.ProfileLabels { + label = append(label, l) + } + } else if fullHeaders || !rpt.options.CompactLabels { + label = ProfileLabels(rpt) + } + + var flatSum int64 + for _, n := range g.Nodes { + flatSum = flatSum + n.Flat + } + + label = append(label, fmt.Sprintf("Showing nodes accounting for %s, %s of %s total", rpt.formatValue(flatSum), strings.TrimSpace(percentage(flatSum, rpt.total)), rpt.formatValue(rpt.total))) + + if rpt.total != 0 { + if droppedNodes > 0 { + label = append(label, genLabel(droppedNodes, "node", "cum", + rpt.formatValue(abs64(int64(float64(rpt.total)*nodeFraction))))) + } + if droppedEdges > 0 { + label = append(label, genLabel(droppedEdges, "edge", "freq", + rpt.formatValue(abs64(int64(float64(rpt.total)*edgeFraction))))) + } + if nodeCount > 0 && nodeCount < origCount { + label = append(label, fmt.Sprintf("Showing top %d nodes out of %d (cum >= %s)", + nodeCount, origCount, + rpt.formatValue(g.Nodes[len(g.Nodes)-1].Cum))) + } + } + return label +} + +func genLabel(d int, n, l, f string) string { + if d > 1 { + n = n + "s" + } + return fmt.Sprintf("Dropped %d %s (%s <= %s)", d, n, l, f) +} + +// Output formats. +const ( + Proto = iota + Dot + Tags + Tree + Text + Traces + Raw + Dis + List + WebList + Callgrind + TopProto +) + +// Options are the formatting and filtering options used to generate a +// profile. +type Options struct { + OutputFormat int + + CumSort bool + CallTree bool + DropNegative bool + PositivePercentages bool + CompactLabels bool + Ratio float64 + Title string + ProfileLabels []string + + NodeCount int + NodeFraction float64 + EdgeFraction float64 + + SampleValue func(s []int64) int64 + SampleType string + SampleUnit string // Unit for the sample data from the profile. + + OutputUnit string // Units for data formatting in report. + + Symbol *regexp.Regexp // Symbols to include on disassembly report. +} + +// New builds a new report indexing the sample values interpreting the +// samples with the provided function. +func New(prof *profile.Profile, o *Options) *Report { + format := func(v int64) string { + if r := o.Ratio; r > 0 && r != 1 { + fv := float64(v) * r + v = int64(fv) + } + return measurement.ScaledLabel(v, o.SampleUnit, o.OutputUnit) + } + return &Report{prof, computeTotal(prof, o.SampleValue, !o.PositivePercentages), + o, format} +} + +// NewDefault builds a new report indexing the last sample value +// available. +func NewDefault(prof *profile.Profile, options Options) *Report { + index := len(prof.SampleType) - 1 + o := &options + if o.Title == "" && len(prof.Mapping) > 0 { + o.Title = filepath.Base(prof.Mapping[0].File) + } + o.SampleType = prof.SampleType[index].Type + o.SampleUnit = strings.ToLower(prof.SampleType[index].Unit) + o.SampleValue = func(v []int64) int64 { + return v[index] + } + return New(prof, o) +} + +// computeTotal computes the sum of all sample values. This will be +// used to compute percentages. If includeNegative is set, use use +// absolute values to provide a meaningful percentage for both +// negative and positive values. Otherwise only use positive values, +// which is useful when comparing profiles from different jobs. 
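As a small illustration (p and value are hypothetical, sample values made up): for selected sample values of +100, -20 and +5,

	computeTotal(p, value, false) // 100 + 5         == 105, positive values only
	computeTotal(p, value, true)  // 100 + |-20| + 5 == 125, negatives count by absolute value

which is what keeps percentages meaningful when comparing profiles that contain negative deltas.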
+func computeTotal(prof *profile.Profile, value func(v []int64) int64, includeNegative bool) int64 { + var ret int64 + for _, sample := range prof.Sample { + if v := value(sample.Value); v > 0 { + ret += v + } else if includeNegative { + ret -= v + } + } + return ret +} + +// Report contains the data and associated routines to extract a +// report from a profile. +type Report struct { + prof *profile.Profile + total int64 + options *Options + formatValue func(int64) string +} + +func (rpt *Report) formatTags(s *profile.Sample) (string, bool) { + var labels []string + for key, vals := range s.Label { + for _, v := range vals { + labels = append(labels, key+":"+v) + } + } + for key, nvals := range s.NumLabel { + for _, v := range nvals { + labels = append(labels, measurement.Label(v, key)) + } + } + if len(labels) == 0 { + return "", false + } + sort.Strings(labels) + return strings.Join(labels, `\n`), true +} + +func abs64(i int64) int64 { + if i < 0 { + return -i + } + return i +} diff --git a/src/internal/report/report_test.go b/src/internal/report/report_test.go new file mode 100644 index 00000000..03f62e52 --- /dev/null +++ b/src/internal/report/report_test.go @@ -0,0 +1,210 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package report + +import ( + "bytes" + "io/ioutil" + "path/filepath" + "regexp" + "testing" + + "internal/binutils" + "profile" + "internal/proftest" +) + +type testcase struct { + rpt *Report + want string +} + +func TestSource(t *testing.T) { + const path = "testdata/" + + sampleValue1 := func(v []int64) int64 { + return v[1] + } + + for _, tc := range []testcase{ + { + rpt: New( + testProfile.Copy(), + &Options{ + OutputFormat: List, + Symbol: regexp.MustCompile(`.`), + Title: filepath.Base(testProfile.Mapping[0].File), + + SampleValue: sampleValue1, + SampleUnit: testProfile.SampleType[1].Unit, + }, + ), + want: path + "source.rpt", + }, + { + rpt: New( + testProfile.Copy(), + &Options{ + OutputFormat: Dot, + CallTree: true, + Symbol: regexp.MustCompile(`.`), + Title: filepath.Base(testProfile.Mapping[0].File), + + SampleValue: sampleValue1, + SampleUnit: testProfile.SampleType[1].Unit, + }, + ), + want: path + "source.dot", + }, + } { + b := bytes.NewBuffer(nil) + if err := Generate(b, tc.rpt, &binutils.Binutils{}); err != nil { + t.Fatalf("%s: %v", tc.want, err) + } + + gold, err := ioutil.ReadFile(tc.want) + if err != nil { + t.Fatalf("%s: %v", tc.want, err) + } + if string(b.String()) != string(gold) { + d, err := proftest.Diff(gold, b.Bytes()) + if err != nil { + t.Fatalf("%s: %v", "source", err) + } + t.Error("source" + "\n" + string(d) + "\n" + "gold:\n" + tc.want) + } + } +} + +var testM = []*profile.Mapping{ + { + ID: 1, + HasFunctions: true, + HasFilenames: true, + HasLineNumbers: true, + HasInlineFrames: true, + }, +} + +var testF = []*profile.Function{ + { + ID: 1, + Name: "main", + Filename: "testdata/source1", + }, + { + ID: 2, + Name: "foo", + Filename: "testdata/source1", + }, + { + ID: 3, + Name: "bar", + Filename: "testdata/source1", + }, + { + ID: 4, + Name: "tee", + Filename: "testdata/source2", + }, +} + +var testL = []*profile.Location{ + { + ID: 1, + Mapping: testM[0], + Line: []profile.Line{ + { + Function: testF[0], + Line: 2, + }, + }, + }, + { + ID: 2, + Mapping: testM[0], + Line: []profile.Line{ + { + Function: testF[1], + Line: 4, + }, + }, + }, + { + ID: 3, + Mapping: testM[0], + Line: []profile.Line{ + { + Function: testF[2], + Line: 10, + }, + }, + }, + { + ID: 4, + Mapping: testM[0], + Line: []profile.Line{ + { + Function: testF[3], + Line: 2, + }, + }, + }, + { + ID: 5, + Mapping: testM[0], + Line: []profile.Line{ + { + Function: testF[3], + Line: 8, + }, + }, + }, +} + +var testProfile = &profile.Profile{ + PeriodType: &profile.ValueType{Type: "cpu", Unit: "millisecond"}, + Period: 10, + DurationNanos: 10e9, + SampleType: []*profile.ValueType{ + {Type: "samples", Unit: "count"}, + {Type: "cpu", Unit: "cycles"}, + }, + Sample: []*profile.Sample{ + { + Location: []*profile.Location{testL[0]}, + Value: []int64{1, 1}, + }, + { + Location: []*profile.Location{testL[2], testL[1], testL[0]}, + Value: []int64{1, 10}, + }, + { + Location: []*profile.Location{testL[4], testL[2], testL[0]}, + Value: []int64{1, 100}, + }, + { + Location: []*profile.Location{testL[3], testL[0]}, + Value: []int64{1, 1000}, + }, + { + Location: []*profile.Location{testL[4], testL[3], testL[0]}, + Value: []int64{1, 10000}, + }, + }, + Location: testL, + Function: testF, + Mapping: testM, +} diff --git a/src/internal/report/source.go b/src/internal/report/source.go new file mode 100644 index 00000000..485f2e8f --- /dev/null +++ b/src/internal/report/source.go @@ -0,0 +1,457 @@ +// Copyright 2014 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package report + +// This file contains routines related to the generation of annotated +// source listings. + +import ( + "bufio" + "fmt" + "html/template" + "io" + "os" + "path/filepath" + "sort" + "strconv" + "strings" + + "internal/graph" + "internal/plugin" +) + +// printSource prints an annotated source listing, include all +// functions with samples that match the regexp rpt.options.symbol. +// The sources are sorted by function name and then by filename to +// eliminate potential nondeterminism. +func printSource(w io.Writer, rpt *Report) error { + o := rpt.options + g := rpt.newGraph(nil) + + // Identify all the functions that match the regexp provided. + // Group nodes for each matching function. + var functions graph.Nodes + functionNodes := make(map[string]graph.Nodes) + for _, n := range g.Nodes { + if !o.Symbol.MatchString(n.Info.Name) { + continue + } + if functionNodes[n.Info.Name] == nil { + functions = append(functions, n) + } + functionNodes[n.Info.Name] = append(functionNodes[n.Info.Name], n) + } + functions.Sort(graph.NameOrder) + + fmt.Fprintf(w, "Total: %s\n", rpt.formatValue(rpt.total)) + for _, fn := range functions { + name := fn.Info.Name + + // Identify all the source files associated to this function. + // Group nodes for each source file. + var sourceFiles graph.Nodes + fileNodes := make(map[string]graph.Nodes) + for _, n := range functionNodes[name] { + if n.Info.File == "" { + continue + } + if fileNodes[n.Info.File] == nil { + sourceFiles = append(sourceFiles, n) + } + fileNodes[n.Info.File] = append(fileNodes[n.Info.File], n) + } + + if len(sourceFiles) == 0 { + fmt.Fprintf(w, "No source information for %s\n", name) + continue + } + + sourceFiles.Sort(graph.FileOrder) + + // Print each file associated with this function. + for _, fl := range sourceFiles { + filename := fl.Info.File + fns := fileNodes[filename] + flatSum, cumSum := fns.Sum() + + fnodes, path, err := getFunctionSource(name, filename, fns, 0, 0) + fmt.Fprintf(w, "ROUTINE ======================== %s in %s\n", name, path) + fmt.Fprintf(w, "%10s %10s (flat, cum) %s of Total\n", + rpt.formatValue(flatSum), rpt.formatValue(cumSum), + percentage(cumSum, rpt.total)) + + if err != nil { + fmt.Fprintf(w, " Error: %v\n", err) + continue + } + + for _, fn := range fnodes { + fmt.Fprintf(w, "%10s %10s %6d:%s\n", valueOrDot(fn.Flat, rpt), valueOrDot(fn.Cum, rpt), fn.Info.Lineno, fn.Info.Name) + } + } + } + return nil +} + +// printWebSource prints an annotated source listing, include all +// functions with samples that match the regexp rpt.options.symbol. +func printWebSource(w io.Writer, rpt *Report, obj plugin.ObjTool) error { + o := rpt.options + g := rpt.newGraph(nil) + + // If the regexp source can be parsed as an address, also match + // functions that land on that address. 
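+	// ParseUint with base 0 accepts prefixed forms, so for example
+	// strconv.ParseUint("0x401230", 0, 64) yields 4198960 with a nil error,
+	// while a symbol name such as "main" fails to parse and is then used
+	// purely as a regexp.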
+ var address *uint64 + if hex, err := strconv.ParseUint(o.Symbol.String(), 0, 64); err == nil { + address = &hex + } + + // Extract interesting symbols from binary files in the profile and + // classify samples per symbol. + symbols := symbolsFromBinaries(rpt.prof, g, o.Symbol, address, obj) + symNodes := nodesPerSymbol(g.Nodes, symbols) + + // Sort symbols for printing. + var syms objSymbols + for s := range symNodes { + syms = append(syms, s) + } + sort.Sort(syms) + + if len(syms) == 0 { + return fmt.Errorf("no samples found on routines matching: %s", o.Symbol.String()) + } + + printHeader(w, rpt) + for _, s := range syms { + name := s.sym.Name[0] + // Identify sources associated to a symbol by examining + // symbol samples. Classify samples per source file. + var sourceFiles graph.Nodes + fileNodes := make(map[string]graph.Nodes) + for _, n := range symNodes[s] { + if n.Info.File == "" { + continue + } + if fileNodes[n.Info.File] == nil { + sourceFiles = append(sourceFiles, n) + } + fileNodes[n.Info.File] = append(fileNodes[n.Info.File], n) + } + + if len(sourceFiles) == 0 { + fmt.Fprintf(w, "No source information for %s\n", name) + continue + } + + sourceFiles.Sort(graph.FileOrder) + + // Print each file associated with this function. + for _, fl := range sourceFiles { + filename := fl.Info.File + fns := fileNodes[filename] + + asm := assemblyPerSourceLine(symbols, fns, filename, obj) + start, end := sourceCoordinates(asm) + + fnodes, path, err := getFunctionSource(name, filename, fns, start, end) + if err != nil { + fnodes, path = getMissingFunctionSource(filename, asm, start, end) + } + + flatSum, cumSum := fnodes.Sum() + printFunctionHeader(w, name, path, flatSum, cumSum, rpt) + for _, fn := range fnodes { + printFunctionSourceLine(w, fn, asm[fn.Info.Lineno], rpt) + } + printFunctionClosing(w) + } + } + printPageClosing(w) + return nil +} + +// sourceCoordinates returns the lowest and highest line numbers from +// a set of assembly statements. +func sourceCoordinates(asm map[int]graph.Nodes) (start, end int) { + for l := range asm { + if start == 0 || l < start { + start = l + } + if end == 0 || l > end { + end = l + } + } + return start, end +} + +// assemblyPerSourceLine disassembles the binary containing a symbol +// and classifies the assembly instructions according to its +// corresponding source line, annotating them with a set of samples. +func assemblyPerSourceLine(objSyms []*objSymbol, rs graph.Nodes, src string, obj plugin.ObjTool) map[int]graph.Nodes { + assembly := make(map[int]graph.Nodes) + // Identify symbol to use for this collection of samples. + o := findMatchingSymbol(objSyms, rs) + if o == nil { + return assembly + } + + // Extract assembly for matched symbol + insns, err := obj.Disasm(o.sym.File, o.sym.Start, o.sym.End) + if err != nil { + return assembly + } + + srcBase := filepath.Base(src) + anodes := annotateAssembly(insns, rs, o.base) + var lineno = 0 + for _, an := range anodes { + if filepath.Base(an.Info.File) == srcBase { + lineno = an.Info.Lineno + } + if lineno != 0 { + assembly[lineno] = append(assembly[lineno], an) + } + } + + return assembly +} + +// findMatchingSymbol looks for the symbol that corresponds to a set +// of samples, by comparing their addresses. 
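To make the address comparison concrete (numbers illustrative only): for a node sampled at address 0x401230 in an object whose base is 0x400000, the match below tests 0x401230 - 0x400000 = 0x1230 against each symbol's range, so a symbol with Start 0x1200 and End 0x1240 matches.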
+func findMatchingSymbol(objSyms []*objSymbol, ns graph.Nodes) *objSymbol { + for _, n := range ns { + for _, o := range objSyms { + if filepath.Base(o.sym.File) == n.Info.Objfile && + o.sym.Start <= n.Info.Address-o.base && + n.Info.Address-o.base <= o.sym.End { + return o + } + } + } + return nil +} + +// printHeader prints the page header for a weblist report. +func printHeader(w io.Writer, rpt *Report) { + fmt.Fprintln(w, weblistPageHeader) + + var labels []string + for _, l := range ProfileLabels(rpt) { + labels = append(labels, template.HTMLEscapeString(l)) + } + + fmt.Fprintf(w, `
<div class="legend">%s<br>Total: %s</div>`,
+		strings.Join(labels, "<br>\n"),
+		rpt.formatValue(rpt.total),
+	)
+}
+
+// printFunctionHeader prints a function header for a weblist report.
+func printFunctionHeader(w io.Writer, name, path string, flatSum, cumSum int64, rpt *Report) {
+	fmt.Fprintf(w, `<h1>%s</h1>%s
+<pre onClick="pprof_toggle_asm(event)">
+  Total:  %10s %10s (flat, cum) %s
+`,
+		template.HTMLEscapeString(name), template.HTMLEscapeString(path),
+		rpt.formatValue(flatSum), rpt.formatValue(cumSum),
+		percentage(cumSum, rpt.total))
+}
+
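For orientation (output approximate): given the totals in the testdata, printFunctionHeader for the routine tee in testdata/source2, with flat 11100 and cum 21100 out of an 11111 total, emits something like:

	<h1>tee</h1>testdata/source2
	<pre onClick="pprof_toggle_asm(event)">
	  Total:       11100      21100 (flat, cum) 189.90% of Total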
+// printFunctionSourceLine prints a source line and the corresponding assembly.
+func printFunctionSourceLine(w io.Writer, fn *graph.Node, assembly graph.Nodes, rpt *Report) {
+	if len(assembly) == 0 {
+		fmt.Fprintf(w,
+			"<span class=line> %6d</span> <span class=nop>  %10s %10s %s </span>\n",
+			fn.Info.Lineno,
+			valueOrDot(fn.Flat, rpt), valueOrDot(fn.Cum, rpt),
+			template.HTMLEscapeString(fn.Info.Name))
+		return
+	}
+
+	fmt.Fprintf(w,
+		"<span class=line> %6d</span> <span class=deadsrc>  %10s %10s %s </span>",
+		fn.Info.Lineno,
+		valueOrDot(fn.Flat, rpt), valueOrDot(fn.Cum, rpt),
+		template.HTMLEscapeString(fn.Info.Name))
+	fmt.Fprint(w, "<span class=asm>")
+	for _, an := range assembly {
+		var fileline string
+		class := "disasmloc"
+		if an.Info.File != "" {
+			fileline = fmt.Sprintf("%s:%d", template.HTMLEscapeString(an.Info.File), an.Info.Lineno)
+			if an.Info.Lineno != fn.Info.Lineno {
+				class = "unimportant"
+			}
+		}
+		fmt.Fprintf(w, " %8s %10s %10s %8x: %-48s <span class=%s>%s</span>\n", "",
+			valueOrDot(an.Flat, rpt), valueOrDot(an.Cum, rpt),
+			an.Info.Address,
+			template.HTMLEscapeString(an.Info.Name),
+			class,
+			template.HTMLEscapeString(fileline))
+	}
+	fmt.Fprintln(w, "</span>")
+}
+
+// printFunctionClosing prints the end of a function in a weblist report.
+func printFunctionClosing(w io.Writer) {
+	fmt.Fprintln(w, "</pre>
") +} + +// printPageClosing prints the end of the page in a weblist report. +func printPageClosing(w io.Writer) { + fmt.Fprintln(w, weblistPageClosing) +} + +// getFunctionSource collects the sources of a function from a source +// file and annotates it with the samples in fns. Returns the sources +// as nodes, using the info.name field to hold the source code. +func getFunctionSource(fun, file string, fns graph.Nodes, start, end int) (graph.Nodes, string, error) { + f, file, err := adjustSourcePath(file) + if err != nil { + return nil, file, err + } + + lineNodes := make(map[int]graph.Nodes) + // Collect source coordinates from profile. + const margin = 5 // Lines before first/after last sample. + if start == 0 { + if fns[0].Info.StartLine != 0 { + start = fns[0].Info.StartLine + } else { + start = fns[0].Info.Lineno - margin + } + } else { + start -= margin + } + if end == 0 { + end = fns[0].Info.Lineno + } + end += margin + for _, n := range fns { + lineno := n.Info.Lineno + nodeStart := n.Info.StartLine + if nodeStart == 0 { + nodeStart = lineno - margin + } + nodeEnd := lineno + margin + if nodeStart < start { + start = nodeStart + } else if nodeEnd > end { + end = nodeEnd + } + lineNodes[lineno] = append(lineNodes[lineno], n) + } + + var src graph.Nodes + buf := bufio.NewReader(f) + lineno := 1 + for { + line, err := buf.ReadString('\n') + if err != nil { + if err != io.EOF { + return nil, file, err + } + if line == "" { + break + } + } + if lineno >= start { + flat, cum := lineNodes[lineno].Sum() + + src = append(src, &graph.Node{ + Info: graph.NodeInfo{ + Name: strings.TrimRight(line, "\n"), + Lineno: lineno, + }, + Flat: flat, + Cum: cum, + }) + } + lineno++ + if lineno > end { + break + } + } + return src, file, nil +} + +// getMissingFunctionSource creates a dummy function body to point to +// the source file and annotates it with the samples in asm. +func getMissingFunctionSource(filename string, asm map[int]graph.Nodes, start, end int) (graph.Nodes, string) { + var fnodes graph.Nodes + for i := start; i <= end; i++ { + lrs := asm[i] + if len(lrs) == 0 { + continue + } + flat, cum := lrs.Sum() + fnodes = append(fnodes, &graph.Node{ + Info: graph.NodeInfo{ + Name: "???", + Lineno: i, + }, + Flat: flat, + Cum: cum, + }) + } + return fnodes, filename +} + +// adjustSourcePath adjusts the path for a source file by trimmming +// known prefixes and searching for the file on all parents of the +// current working dir. +func adjustSourcePath(path string) (*os.File, string, error) { + path = trimPath(path) + f, err := os.Open(path) + if err == nil { + return f, path, nil + } + + if dir, wderr := os.Getwd(); wderr == nil { + for { + parent := filepath.Dir(dir) + if parent == dir { + break + } + if f, err := os.Open(filepath.Join(parent, path)); err == nil { + return f, filepath.Join(parent, path), nil + } + + dir = parent + } + } + + return nil, path, err +} + +// trimPath cleans up a path by removing prefixes that are commonly +// found on profiles. +func trimPath(path string) string { + basePaths := []string{ + "/proc/self/cwd/./", + "/proc/self/cwd/", + } + + sPath := filepath.ToSlash(path) + + for _, base := range basePaths { + if strings.HasPrefix(sPath, base) { + return filepath.FromSlash(sPath[len(base):]) + } + } + return path +} diff --git a/src/internal/report/source_html.go b/src/internal/report/source_html.go new file mode 100644 index 00000000..2bb81f20 --- /dev/null +++ b/src/internal/report/source_html.go @@ -0,0 +1,87 @@ +// Copyright 2014 Google Inc. 
All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package report + +const weblistPageHeader = ` + + + +Pprof listing + + + + +` + +const weblistPageClosing = ` + + +` diff --git a/src/internal/report/testdata/source.dot b/src/internal/report/testdata/source.dot new file mode 100644 index 00000000..384bdd84 --- /dev/null +++ b/src/internal/report/testdata/source.dot @@ -0,0 +1,17 @@ +digraph "." { +node [style=filled fillcolor="#f8f8f8"] +subgraph cluster_L { "Duration: 10s, Total samples = 11111 " [shape=box fontsize=16 label="Duration: 10s, Total samples = 11111 \lShowing nodes accounting for 11111, 100% of 11111 total\l"] } +N1 [label="tee\nsource2:8\n10000 (90.00%)" fontsize=24 shape=box tooltip="tee testdata/source2:8 (10000)" color="#b20500" fillcolor="#edd6d5"] +N2 [label="main\nsource1:2\n1 (0.009%)\nof 11111 (100%)" fontsize=9 shape=box tooltip="main testdata/source1:2 (11111)" color="#b20000" fillcolor="#edd5d5"] +N3 [label="tee\nsource2:2\n1000 (9.00%)\nof 11000 (99.00%)" fontsize=14 shape=box tooltip="tee testdata/source2:2 (11000)" color="#b20000" fillcolor="#edd5d5"] +N4 [label="tee\nsource2:8\n100 (0.9%)" fontsize=10 shape=box tooltip="tee testdata/source2:8 (100)" color="#b2b0aa" fillcolor="#edecec"] +N5 [label="bar\nsource1:10\n10 (0.09%)" fontsize=9 shape=box tooltip="bar testdata/source1:10 (10)" color="#b2b2b1" fillcolor="#ededed"] +N6 [label="bar\nsource1:10\n0 of 100 (0.9%)" fontsize=8 shape=box tooltip="bar testdata/source1:10 (100)" color="#b2b0aa" fillcolor="#edecec"] +N7 [label="foo\nsource1:4\n0 of 10 (0.09%)" fontsize=8 shape=box tooltip="foo testdata/source1:4 (10)" color="#b2b2b1" fillcolor="#ededed"] +N2 -> N3 [label=" 11000" weight=100 penwidth=5 color="#b20000" tooltip="main testdata/source1:2 -> tee testdata/source2:2 (11000)" labeltooltip="main testdata/source1:2 -> tee testdata/source2:2 (11000)"] +N3 -> N1 [label=" 10000" weight=91 penwidth=5 color="#b20500" tooltip="tee testdata/source2:2 -> tee testdata/source2:8 (10000)" labeltooltip="tee testdata/source2:2 -> tee testdata/source2:8 (10000)"] +N6 -> N4 [label=" 100" color="#b2b0aa" tooltip="bar testdata/source1:10 -> tee testdata/source2:8 (100)" labeltooltip="bar testdata/source1:10 -> tee testdata/source2:8 (100)"] +N2 -> N6 [label=" 100" color="#b2b0aa" tooltip="main testdata/source1:2 -> bar testdata/source1:10 (100)" labeltooltip="main testdata/source1:2 -> bar testdata/source1:10 (100)"] +N7 -> N5 [label=" 10" color="#b2b2b1" tooltip="foo testdata/source1:4 -> bar testdata/source1:10 (10)" labeltooltip="foo testdata/source1:4 -> bar testdata/source1:10 (10)"] +N2 -> N7 [label=" 10" color="#b2b2b1" tooltip="main testdata/source1:2 -> foo testdata/source1:4 (10)" labeltooltip="main testdata/source1:2 -> foo testdata/source1:4 (10)"] +} diff --git a/src/internal/report/testdata/source.rpt b/src/internal/report/testdata/source.rpt new file mode 100644 index 00000000..9ec7b3b0 --- /dev/null +++ b/src/internal/report/testdata/source.rpt @@ -0,0 +1,49 @@ +Total: 11111 
+ROUTINE ======================== bar in testdata/source1 + 10 110 (flat, cum) 0.99% of Total + . . 5:source1 line 5; + . . 6:source1 line 6; + . . 7:source1 line 7; + . . 8:source1 line 8; + . . 9:source1 line 9; + 10 110 10:source1 line 10; + . . 11:source1 line 11; + . . 12:source1 line 12; + . . 13:source1 line 13; + . . 14:source1 line 14; + . . 15:source1 line 15; +ROUTINE ======================== foo in testdata/source1 + 0 10 (flat, cum) 0.09% of Total + . . 1:source1 line 1; + . . 2:source1 line 2; + . . 3:source1 line 3; + . 10 4:source1 line 4; + . . 5:source1 line 5; + . . 6:source1 line 6; + . . 7:source1 line 7; + . . 8:source1 line 8; + . . 9:source1 line 9; +ROUTINE ======================== main in testdata/source1 + 1 11111 (flat, cum) 100% of Total + . . 1:source1 line 1; + 1 11111 2:source1 line 2; + . . 3:source1 line 3; + . . 4:source1 line 4; + . . 5:source1 line 5; + . . 6:source1 line 6; + . . 7:source1 line 7; +ROUTINE ======================== tee in testdata/source2 + 11100 21100 (flat, cum) 189.90% of Total + . . 1:source2 line 1; + 1000 11000 2:source2 line 2; + . . 3:source2 line 3; + . . 4:source2 line 4; + . . 5:source2 line 5; + . . 6:source2 line 6; + . . 7:source2 line 7; + 10100 10100 8:source2 line 8; + . . 9:source2 line 9; + . . 10:source2 line 10; + . . 11:source2 line 11; + . . 12:source2 line 12; + . . 13:source2 line 13; diff --git a/src/internal/report/testdata/source1 b/src/internal/report/testdata/source1 new file mode 100644 index 00000000..70e3fc33 --- /dev/null +++ b/src/internal/report/testdata/source1 @@ -0,0 +1,19 @@ +source1 line 1; +source1 line 2; +source1 line 3; +source1 line 4; +source1 line 5; +source1 line 6; +source1 line 7; +source1 line 8; +source1 line 9; +source1 line 10; +source1 line 11; +source1 line 12; +source1 line 13; +source1 line 14; +source1 line 15; +source1 line 16; +source1 line 17; +source1 line 18; + diff --git a/src/internal/report/testdata/source2 b/src/internal/report/testdata/source2 new file mode 100644 index 00000000..54f99cca --- /dev/null +++ b/src/internal/report/testdata/source2 @@ -0,0 +1,19 @@ +source2 line 1; +source2 line 2; +source2 line 3; +source2 line 4; +source2 line 5; +source2 line 6; +source2 line 7; +source2 line 8; +source2 line 9; +source2 line 10; +source2 line 11; +source2 line 12; +source2 line 13; +source2 line 14; +source2 line 15; +source2 line 16; +source2 line 17; +source2 line 18; + diff --git a/src/internal/symbolizer/symbolizer.go b/src/internal/symbolizer/symbolizer.go new file mode 100644 index 00000000..b0a9cebd --- /dev/null +++ b/src/internal/symbolizer/symbolizer.go @@ -0,0 +1,320 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package symbolizer provides a routine to populate a profile with +// symbol, file and line number information. It relies on the +// addr2liner and demangle packages to do the actual work. 
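The mode string accepted by Symbolize below is a colon-separated list; as a quick reference to the behaviour implemented in this file:

	"local" / "fastlocal"   symbolize with local binutils only
	"remote"                symbolize through symbolz HTTP requests only
	"none" / "no"           skip symbolization entirely
	"force"                 re-symbolize mappings already marked as symbolized
	"demangle=MODE"         MODE is one of none, templates, full or default

so, for example, -symbolize=remote:demangle=full:force is a valid combination.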
+package symbolizer + +import ( + "fmt" + "io/ioutil" + "net/http" + "path/filepath" + "strings" + + "internal/binutils" + "internal/plugin" + "profile" + "internal/symbolz" + "golang/demangle" +) + +// Symbolizer implements the plugin.Symbolize interface. +type Symbolizer struct { + Obj plugin.ObjTool + UI plugin.UI +} + +// Symbolize attempts to symbolize profile p. First uses binutils on +// local binaries; if the source is a URL it attempts to get any +// missed entries using symbolz. +func (s *Symbolizer) Symbolize(mode string, sources plugin.MappingSources, p *profile.Profile) error { + remote, local, force, demanglerMode := true, true, false, "" + for _, o := range strings.Split(strings.ToLower(mode), ":") { + switch o { + case "none", "no": + return nil + case "local", "fastlocal": + remote, local = false, true + case "remote": + remote, local = true, false + case "", "force": + force = true + default: + switch d := strings.TrimPrefix(o, "demangle="); d { + case "full", "none", "templates": + demanglerMode = d + force = true + continue + case "default": + continue + } + s.UI.PrintErr("ignoring unrecognized symbolization option: " + mode) + s.UI.PrintErr("expecting -symbolize=[local|fastlocal|remote|none][:force][:demangle=[none|full|templates|default]") + } + } + + var err error + if local { + // Symbolize locally using binutils. + if err = localSymbolize(mode, p, s.Obj, s.UI); err == nil { + remote = false // Already symbolized, no need to apply remote symbolization. + } + } + if remote { + if err = symbolz.Symbolize(sources, postURL, p, s.UI); err != nil { + return err // Ran out of options. + } + } + + Demangle(p, force, demanglerMode) + return nil +} + +// postURL issues a POST to a URL over HTTP. +func postURL(source, post string) ([]byte, error) { + resp, err := http.Post(source, "application/octet-stream", strings.NewReader(post)) + if err != nil { + return nil, fmt.Errorf("http post %s: %v", source, err) + } + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("server response: %s", resp.Status) + } + defer resp.Body.Close() + return ioutil.ReadAll(resp.Body) +} + +// localSymbolize adds symbol and line number information to all locations +// in a profile. mode enables some options to control +// symbolization. +func localSymbolize(mode string, prof *profile.Profile, obj plugin.ObjTool, ui plugin.UI) error { + force := false + // Disable some mechanisms based on mode string. + for _, o := range strings.Split(strings.ToLower(mode), ":") { + switch { + case o == "force": + force = true + case o == "fastlocal": + if bu, ok := obj.(*binutils.Binutils); ok { + bu.SetFastSymbolization(true) + } + default: + } + } + + mt, err := newMapping(prof, obj, ui, force) + if err != nil { + return err + } + defer mt.close() + + functions := make(map[profile.Function]*profile.Function) + for _, l := range mt.prof.Location { + m := l.Mapping + segment := mt.segments[m] + if segment == nil { + // Nothing to do. + continue + } + + stack, err := segment.SourceLine(l.Address) + if err != nil || len(stack) == 0 { + // No answers from addr2line. 
+ continue + } + + l.Line = make([]profile.Line, len(stack)) + for i, frame := range stack { + if frame.Func != "" { + m.HasFunctions = true + } + if frame.File != "" { + m.HasFilenames = true + } + if frame.Line != 0 { + m.HasLineNumbers = true + } + f := &profile.Function{ + Name: frame.Func, + SystemName: frame.Func, + Filename: frame.File, + } + if fp := functions[*f]; fp != nil { + f = fp + } else { + functions[*f] = f + f.ID = uint64(len(mt.prof.Function)) + 1 + mt.prof.Function = append(mt.prof.Function, f) + } + l.Line[i] = profile.Line{ + Function: f, + Line: int64(frame.Line), + } + } + + if len(stack) > 0 { + m.HasInlineFrames = true + } + } + + return nil +} + +// Demangle updates the function names in a profile with demangled C++ +// names, simplified according to demanglerMode. If force is set, +// overwrite any names that appear already demangled. +func Demangle(prof *profile.Profile, force bool, demanglerMode string) { + if force { + // Remove the current demangled names to force demangling + for _, f := range prof.Function { + if f.Name != "" && f.SystemName != "" { + f.Name = f.SystemName + } + } + } + + var options []demangle.Option + switch demanglerMode { + case "": // demangled, simplified: no parameters, no templates, no return type + options = []demangle.Option{demangle.NoParams, demangle.NoTemplateParams} + case "templates": // demangled, simplified: no parameters, no return type + options = []demangle.Option{demangle.NoParams} + case "full": + options = []demangle.Option{demangle.NoClones} + case "none": // no demangling + return + } + + // Copy the options because they may be updated by the call. + o := make([]demangle.Option, len(options)) + for _, fn := range prof.Function { + if fn.Name != "" && fn.SystemName != fn.Name { + continue // Already demangled. + } + copy(o, options) + if demangled := demangle.Filter(fn.SystemName, o...); demangled != fn.SystemName { + fn.Name = demangled + continue + } + // Could not demangle. Apply heuristics in case the name is + // already demangled. + name := fn.SystemName + if looksLikeDemangledCPlusPlus(name) { + if demanglerMode == "" || demanglerMode == "templates" { + name = removeMatching(name, '(', ')') + } + if demanglerMode == "" { + name = removeMatching(name, '<', '>') + } + } + fn.Name = name + } +} + +// looksLikeDemangledCPlusPlus is a heuristic to decide if a name is +// the result of demangling C++. If so, further heuristics will be +// applied to simplify the name. +func looksLikeDemangledCPlusPlus(demangled string) bool { + if strings.Contains(demangled, ".<") { // Skip java names of the form "class." + return false + } + return strings.ContainsAny(demangled, "<>[]") || strings.Contains(demangled, "::") +} + +// removeMatching removes nested instances of start..end from name. +func removeMatching(name string, start, end byte) string { + s := string(start) + string(end) + var nesting, first, current int + for index := strings.IndexAny(name[current:], s); index != -1; index = strings.IndexAny(name[current:], s) { + switch current += index; name[current] { + case start: + nesting++ + if nesting == 1 { + first = current + } + case end: + nesting-- + switch { + case nesting < 0: + return name // Mismatch, abort + case nesting == 0: + name = name[:first] + name[current+1:] + current = first - 1 + } + } + current++ + } + return name +} + +// newMapping creates a mappingTable for a profile. 
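The removeMatching helper above strips balanced delimiter pairs, which is how already-demangled C++ names are simplified; for instance (inputs illustrative):

	removeMatching("std::vector<int>::push_back(int&&)", '(', ')') // "std::vector<int>::push_back"
	removeMatching("std::vector<int>::push_back", '<', '>')        // "std::vector::push_back"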
+func newMapping(prof *profile.Profile, obj plugin.ObjTool, ui plugin.UI, force bool) (*mappingTable, error) { + mt := &mappingTable{ + prof: prof, + segments: make(map[*profile.Mapping]plugin.ObjFile), + } + + // Identify used mappings + mappings := make(map[*profile.Mapping]bool) + for _, l := range prof.Location { + mappings[l.Mapping] = true + } + + for _, m := range prof.Mapping { + if !mappings[m] { + continue + } + + // Do not attempt to re-symbolize a mapping that has already been symbolized. + if !force && (m.HasFunctions || m.HasFilenames || m.HasLineNumbers) { + continue + } + + // Skip well-known system mappings + name := filepath.Base(m.File) + if name == "" || name == "[vdso]" || strings.HasPrefix(name, "linux-vdso") { + continue + } + + f, err := obj.Open(m.File, m.Start, m.Limit, m.Offset) + if err != nil { + ui.PrintErr("Local symbolization failed for ", name, ": ", err) + continue + } + if fid := f.BuildID(); m.BuildID != "" && fid != "" && fid != m.BuildID { + ui.PrintErr("Local symbolization failed for ", name, ": build ID mismatch") + f.Close() + continue + } + + mt.segments[m] = f + } + + return mt, nil +} + +// mappingTable contains the mechanisms for symbolization of a +// profile. +type mappingTable struct { + prof *profile.Profile + segments map[*profile.Mapping]plugin.ObjFile +} + +// Close releases any external processes being used for the mapping. +func (mt *mappingTable) close() { + for _, segment := range mt.segments { + segment.Close() + } +} diff --git a/src/internal/symbolizer/symbolizer_test.go b/src/internal/symbolizer/symbolizer_test.go new file mode 100644 index 00000000..712f373e --- /dev/null +++ b/src/internal/symbolizer/symbolizer_test.go @@ -0,0 +1,192 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
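Before the tests that exercise it, here is a minimal sketch of how a driver might wire up the symbolizer defined above. The package name, helper name, and mode string are illustrative assumptions, not part of this change; the types and import paths are the ones used throughout it.

    package driverexample // hypothetical package, for illustration only

    import (
        "internal/plugin"
        "internal/symbolizer"
        "profile"
    )

    // symbolizeLocal symbolizes prof using only local binaries via binutils.
    // "demangle=templates" simplifies demangled C++ names but keeps template
    // parameters; "none" would skip symbolization entirely.
    func symbolizeLocal(obj plugin.ObjTool, ui plugin.UI, sources plugin.MappingSources, prof *profile.Profile) error {
        s := &symbolizer.Symbolizer{Obj: obj, UI: ui}
        return s.Symbolize("local:demangle=templates", sources, prof)
    }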
+ +package symbolizer + +import ( + "fmt" + "regexp" + "testing" + + "internal/plugin" + "profile" + "internal/proftest" +) + +var testM = []*profile.Mapping{ + { + ID: 1, + Start: 0x1000, + Limit: 0x5000, + }, +} + +var testL = []*profile.Location{ + { + ID: 1, + Mapping: testM[0], + Address: 1000, + }, + { + ID: 2, + Mapping: testM[0], + Address: 2000, + }, + { + ID: 3, + Mapping: testM[0], + Address: 3000, + }, + { + ID: 4, + Mapping: testM[0], + Address: 4000, + }, + { + ID: 5, + Mapping: testM[0], + Address: 5000, + }, +} + +var testProfile = profile.Profile{ + DurationNanos: 10e9, + SampleType: []*profile.ValueType{ + {Type: "cpu", Unit: "cycles"}, + }, + Sample: []*profile.Sample{ + { + Location: []*profile.Location{testL[0]}, + Value: []int64{1}, + }, + { + Location: []*profile.Location{testL[1], testL[0]}, + Value: []int64{10}, + }, + { + Location: []*profile.Location{testL[2], testL[0]}, + Value: []int64{100}, + }, + { + Location: []*profile.Location{testL[3], testL[0]}, + Value: []int64{1}, + }, + { + Location: []*profile.Location{testL[4], testL[3], testL[0]}, + Value: []int64{10000}, + }, + }, + Location: testL, + Mapping: testM, + PeriodType: &profile.ValueType{Type: "cpu", Unit: "milliseconds"}, + Period: 10, +} + +func TestSymbolization(t *testing.T) { + prof := testProfile + + if prof.HasFunctions() { + t.Error("unexpected function names") + } + if prof.HasFileLines() { + t.Error("unexpected filenames or line numbers") + } + + b := mockObjTool{} + if err := localSymbolize("", &prof, b, &proftest.TestUI{T: t}); err != nil { + t.Fatalf("Symbolize(): %v", err) + } + + for _, loc := range prof.Location { + if err := checkSymbolizedLocation(loc.Address, loc.Line); err != nil { + t.Errorf("location %d: %v", loc.Address, err) + } + } + if !prof.HasFunctions() { + t.Error("missing function names") + } + if !prof.HasFileLines() { + t.Error("missing filenames or line numbers") + } +} + +func checkSymbolizedLocation(a uint64, got []profile.Line) error { + want, ok := mockAddresses[a] + if !ok { + return fmt.Errorf("unexpected address") + } + if len(want) != len(got) { + return fmt.Errorf("want len %d, got %d", len(want), len(got)) + } + + for i, w := range want { + g := got[i] + if g.Function.Name != w.Func { + return fmt.Errorf("want function: %q, got %q", w.Func, g.Function.Name) + } + if g.Function.Filename != w.File { + return fmt.Errorf("want filename: %q, got %q", w.File, g.Function.Filename) + } + if g.Line != int64(w.Line) { + return fmt.Errorf("want lineno: %d, got %d", w.Line, g.Line) + } + } + return nil +} + +var mockAddresses = map[uint64][]plugin.Frame{ + 1000: []plugin.Frame{{"fun11", "file11.src", 10}}, + 2000: []plugin.Frame{{"fun21", "file21.src", 20}, {"fun22", "file22.src", 20}}, + 3000: []plugin.Frame{{"fun31", "file31.src", 30}, {"fun32", "file32.src", 30}, {"fun33", "file33.src", 30}}, + 4000: []plugin.Frame{{"fun41", "file41.src", 40}, {"fun42", "file42.src", 40}, {"fun43", "file43.src", 40}, {"fun44", "file44.src", 40}}, + 5000: []plugin.Frame{{"fun51", "file51.src", 50}, {"fun52", "file52.src", 50}, {"fun53", "file53.src", 50}, {"fun54", "file54.src", 50}, {"fun55", "file55.src", 50}}, +} + +type mockObjTool struct{} + +func (mockObjTool) Open(file string, start, limit, offset uint64) (plugin.ObjFile, error) { + return mockObjFile{frames: mockAddresses}, nil +} + +func (mockObjTool) Disasm(file string, start, end uint64) ([]plugin.Inst, error) { + return nil, fmt.Errorf("disassembly not supported") +} + +type mockObjFile struct { + frames 
map[uint64][]plugin.Frame +} + +func (mockObjFile) Name() string { + return "" +} + +func (mockObjFile) Base() uint64 { + return 0 +} + +func (mockObjFile) BuildID() string { + return "" +} + +func (mf mockObjFile) SourceLine(addr uint64) ([]plugin.Frame, error) { + return mf.frames[addr], nil +} + +func (mockObjFile) Symbols(r *regexp.Regexp, addr uint64) ([]*plugin.Sym, error) { + return []*plugin.Sym{}, nil +} + +func (mockObjFile) Close() error { + return nil +} diff --git a/src/internal/symbolz/symbolz.go b/src/internal/symbolz/symbolz.go new file mode 100644 index 00000000..a294fd72 --- /dev/null +++ b/src/internal/symbolz/symbolz.go @@ -0,0 +1,161 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package symbolz symbolizes a profile using the output from the symbolz +// service. +package symbolz + +import ( + "bytes" + "fmt" + "io" + "net/url" + "path" + "regexp" + "strconv" + "strings" + + "internal/plugin" + "profile" +) + +var ( + symbolzRE = regexp.MustCompile(`(0x[[:xdigit:]]+)\s+(.*)`) +) + +// Symbolize symbolizes profile p by parsing data returned by a +// symbolz handler. syms receives the symbolz query (hex addresses +// separated by '+') and returns the symbolz output in a string. It +// symbolizes all locations based on their addresses, regardless of +// mapping. +func Symbolize(sources plugin.MappingSources, syms func(string, string) ([]byte, error), p *profile.Profile, ui plugin.UI) error { + for _, m := range p.Mapping { + if m.HasFunctions { + continue + } + mappingSources := sources[m.File] + if m.BuildID != "" { + mappingSources = append(mappingSources, sources[m.BuildID]...) + } + for _, source := range mappingSources { + if symz := symbolz(source.Source); symz != "" { + if err := symbolizeMapping(symz, int64(source.Start)-int64(m.Start), syms, m, p); err != nil { + return err + } + m.HasFunctions = true + break + } + } + } + + return nil +} + +// symbolz returns the corresponding symbolz source for a profile URL. +func symbolz(source string) string { + if url, err := url.Parse(source); err == nil && url.Host != "" { + if strings.Contains(url.Path, "/") { + if dir := path.Dir(url.Path); dir == "/debug/pprof" { + // For Go language profile handlers in net/http/pprof package. + url.Path = "/debug/pprof/symbol" + } else { + url.Path = "/symbolz" + } + url.RawQuery = "" + return url.String() + } + } + + return "" +} + +// symbolizeMapping symbolizes locations belonging to a Mapping by querying +// a symbolz handler. An offset is applied to all addresses to take care of +// normalization occured for merged Mappings. +func symbolizeMapping(source string, offset int64, syms func(string, string) ([]byte, error), m *profile.Mapping, p *profile.Profile) error { + // Construct query of addresses to symbolize. + var a []string + for _, l := range p.Location { + if l.Mapping == m && l.Address != 0 && len(l.Line) == 0 { + // Compensate for normalization. 
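+			// offset is source.Start - m.Start (see Symbolize above): it converts
+			// the possibly merge-normalized profile address back into the address
+			// space that this source's symbolz handler expects.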
+ addr := int64(l.Address) + offset + if addr < 0 { + return fmt.Errorf("unexpected negative adjusted address, mapping %v source %d, offset %d", l.Mapping, l.Address, offset) + } + a = append(a, fmt.Sprintf("%#x", addr)) + } + } + + if len(a) == 0 { + // No addresses to symbolize. + return nil + } + + lines := make(map[uint64]profile.Line) + functions := make(map[string]*profile.Function) + + b, err := syms(source, strings.Join(a, "+")) + if err != nil { + return err + } + + buf := bytes.NewBuffer(b) + for { + l, err := buf.ReadString('\n') + + if err != nil { + if err == io.EOF { + break + } + return err + } + + if symbol := symbolzRE.FindStringSubmatch(l); len(symbol) == 3 { + addr, err := strconv.ParseInt(symbol[1], 0, 64) + if err != nil { + return fmt.Errorf("unexpected parse failure %s: %v", symbol[1], err) + } + if addr < 0 { + return fmt.Errorf("unexpected negative adjusted address, source %s, offset %d", symbol[1], offset) + } + // Reapply offset expected by the profile. + addr -= offset + + name := symbol[2] + fn := functions[name] + if fn == nil { + fn = &profile.Function{ + ID: uint64(len(p.Function) + 1), + Name: name, + SystemName: name, + } + functions[name] = fn + p.Function = append(p.Function, fn) + } + + lines[uint64(addr)] = profile.Line{Function: fn} + } + } + + for _, l := range p.Location { + if l.Mapping != m { + continue + } + if line, ok := lines[l.Address]; ok { + l.Line = []profile.Line{line} + } + } + + return nil +} diff --git a/src/internal/symbolz/symbolz_test.go b/src/internal/symbolz/symbolz_test.go new file mode 100644 index 00000000..0116a66e --- /dev/null +++ b/src/internal/symbolz/symbolz_test.go @@ -0,0 +1,100 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
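Before its tests, a sketch of how Symbolize above is meant to be driven. The syms callback receives the symbolz URL plus a '+'-separated list of hex addresses and must return lines of the form "0xADDR name" (matched by symbolzRE); in this change the symbolizer package's postURL plays that role. The helper below is an illustrative assumption, not part of the patch.

    package driverexample // hypothetical package, for illustration only

    import (
        "io/ioutil"
        "net/http"
        "strings"

        "internal/plugin"
        "internal/symbolz"
        "profile"
    )

    // remoteSymbolize fills in function names for unsymbolized mappings by
    // POSTing address queries to each mapping's symbolz source.
    func remoteSymbolize(sources plugin.MappingSources, prof *profile.Profile, ui plugin.UI) error {
        post := func(source, query string) ([]byte, error) {
            resp, err := http.Post(source, "application/octet-stream", strings.NewReader(query))
            if err != nil {
                return nil, err
            }
            defer resp.Body.Close()
            return ioutil.ReadAll(resp.Body)
        }
        return symbolz.Symbolize(sources, post, prof, ui)
    }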
+ +package symbolz + +import ( + "fmt" + "strings" + "testing" + + "internal/plugin" + "profile" + "internal/proftest" +) + +func TestSymbolzURL(t *testing.T) { + for try, want := range map[string]string{ + "http://host:8000/profilez": "http://host:8000/symbolz", + "http://host:8000/profilez?seconds=5": "http://host:8000/symbolz", + "http://host:8000/profilez?seconds=5&format=proto": "http://host:8000/symbolz", + "http://host:8000/heapz?format=legacy": "http://host:8000/symbolz", + "http://host:8000/debug/pprof/profile": "http://host:8000/debug/pprof/symbol", + "http://host:8000/debug/pprof/profile?seconds=10": "http://host:8000/debug/pprof/symbol", + "http://host:8000/debug/pprof/heap": "http://host:8000/debug/pprof/symbol", + } { + if got := symbolz(try); got != want { + t.Errorf(`symbolz(%s)=%s, want "%s"`, try, got, want) + } + } +} + +func TestSymbolize(t *testing.T) { + m := []*profile.Mapping{ + { + ID: 1, + Start: 0x1000, + Limit: 0x5000, + BuildID: "buildid", + }, + } + p := &profile.Profile{ + Location: []*profile.Location{ + {ID: 1, Mapping: m[0], Address: 0x1000}, + {ID: 2, Mapping: m[0], Address: 0x2000}, + {ID: 3, Mapping: m[0], Address: 0x3000}, + {ID: 4, Mapping: m[0], Address: 0x4000}, + }, + Mapping: m, + } + + s := plugin.MappingSources{ + "buildid": []struct { + Source string + Start uint64 + }{ + {Source: "http://localhost:80/profilez"}, + }, + } + + if err := Symbolize(s, fetchSymbols, p, &proftest.TestUI{T: t}); err != nil { + t.Errorf("symbolz: %v", err) + } + + if l := p.Location[0]; len(l.Line) != 0 { + t.Errorf("unexpected symbolization for %#x: %v", l.Address, l.Line) + } + + for _, l := range p.Location[1:] { + if len(l.Line) != 1 { + t.Errorf("failed to symbolize %#x", l.Address) + continue + } + address := l.Address - l.Mapping.Start + if got, want := l.Line[0].Function.Name, fmt.Sprintf("%#x", address); got != want { + t.Errorf("symbolz %#x, got %s, want %s", address, got, want) + } + } +} + +func fetchSymbols(source, post string) ([]byte, error) { + var symbolz string + + addresses := strings.Split(post, "+") + // Do not symbolize the first symbol. + for _, address := range addresses[1:] { + symbolz += fmt.Sprintf("%s\t%s\n", address, address) + } + return []byte(symbolz), nil +} diff --git a/src/pprof/pprof.go b/src/pprof/pprof.go new file mode 100644 index 00000000..e87ed445 --- /dev/null +++ b/src/pprof/pprof.go @@ -0,0 +1,30 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// pprof is a tool for collection, manipulation and visualization +// of performance profiles. +package main + +import ( + "fmt" + "os" + + "driver" +) + +func main() { + if err := driver.PProf(&driver.Options{}); err != nil { + fmt.Fprintf(os.Stderr, "pprof: %v\n", err) + } +} diff --git a/src/profile/encode.go b/src/profile/encode.go new file mode 100644 index 00000000..c1d65459 --- /dev/null +++ b/src/profile/encode.go @@ -0,0 +1,493 @@ +// Copyright 2014 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package profile + +import ( + "errors" + "fmt" + "sort" +) + +func (p *Profile) decoder() []decoder { + return profileDecoder +} + +// preEncode populates the unexported fields to be used by encode +// (with suffix X) from the corresponding exported fields. The +// exported fields are cleared up to facilitate testing. +func (p *Profile) preEncode() { + strings := make(map[string]int) + addString(strings, "") + + for _, st := range p.SampleType { + st.typeX = addString(strings, st.Type) + st.unitX = addString(strings, st.Unit) + } + + for _, s := range p.Sample { + s.labelX = nil + var keys []string + for k := range s.Label { + keys = append(keys, k) + } + sort.Strings(keys) + for _, k := range keys { + vs := s.Label[k] + for _, v := range vs { + s.labelX = append(s.labelX, + label{ + keyX: addString(strings, k), + strX: addString(strings, v), + }, + ) + } + } + var numKeys []string + for k := range s.NumLabel { + numKeys = append(numKeys, k) + } + sort.Strings(numKeys) + for _, k := range numKeys { + vs := s.NumLabel[k] + for _, v := range vs { + s.labelX = append(s.labelX, + label{ + keyX: addString(strings, k), + numX: v, + }, + ) + } + } + s.locationIDX = nil + for _, l := range s.Location { + s.locationIDX = append(s.locationIDX, l.ID) + } + } + + for _, m := range p.Mapping { + m.fileX = addString(strings, m.File) + m.buildIDX = addString(strings, m.BuildID) + } + + for _, l := range p.Location { + for i, ln := range l.Line { + if ln.Function != nil { + l.Line[i].functionIDX = ln.Function.ID + } else { + l.Line[i].functionIDX = 0 + } + } + if l.Mapping != nil { + l.mappingIDX = l.Mapping.ID + } else { + l.mappingIDX = 0 + } + } + for _, f := range p.Function { + f.nameX = addString(strings, f.Name) + f.systemNameX = addString(strings, f.SystemName) + f.filenameX = addString(strings, f.Filename) + } + + p.dropFramesX = addString(strings, p.DropFrames) + p.keepFramesX = addString(strings, p.KeepFrames) + + if pt := p.PeriodType; pt != nil { + pt.typeX = addString(strings, pt.Type) + pt.unitX = addString(strings, pt.Unit) + } + + p.commentX = nil + for _, c := range p.Comments { + p.commentX = append(p.commentX, addString(strings, c)) + } + + p.stringTable = make([]string, len(strings)) + for s, i := range strings { + p.stringTable[i] = s + } +} + +func (p *Profile) encode(b *buffer) { + for _, x := range p.SampleType { + encodeMessage(b, 1, x) + } + for _, x := range p.Sample { + encodeMessage(b, 2, x) + } + for _, x := range p.Mapping { + encodeMessage(b, 3, x) + } + for _, x := range p.Location { + encodeMessage(b, 4, x) + } + for _, x := range p.Function { + encodeMessage(b, 5, x) + } + encodeStrings(b, 6, p.stringTable) + encodeInt64Opt(b, 7, p.dropFramesX) + encodeInt64Opt(b, 8, p.keepFramesX) + encodeInt64Opt(b, 9, p.TimeNanos) + encodeInt64Opt(b, 10, p.DurationNanos) + if pt := p.PeriodType; pt != nil && (pt.typeX != 0 || pt.unitX != 0) { + encodeMessage(b, 11, p.PeriodType) + } + encodeInt64Opt(b, 12, p.Period) + 
encodeInt64s(b, 13, p.commentX) +} + +var profileDecoder = []decoder{ + nil, // 0 + // repeated ValueType sample_type = 1 + func(b *buffer, m message) error { + x := new(ValueType) + pp := m.(*Profile) + pp.SampleType = append(pp.SampleType, x) + return decodeMessage(b, x) + }, + // repeated Sample sample = 2 + func(b *buffer, m message) error { + x := new(Sample) + pp := m.(*Profile) + pp.Sample = append(pp.Sample, x) + return decodeMessage(b, x) + }, + // repeated Mapping mapping = 3 + func(b *buffer, m message) error { + x := new(Mapping) + pp := m.(*Profile) + pp.Mapping = append(pp.Mapping, x) + return decodeMessage(b, x) + }, + // repeated Location location = 4 + func(b *buffer, m message) error { + x := new(Location) + pp := m.(*Profile) + pp.Location = append(pp.Location, x) + return decodeMessage(b, x) + }, + // repeated Function function = 5 + func(b *buffer, m message) error { + x := new(Function) + pp := m.(*Profile) + pp.Function = append(pp.Function, x) + return decodeMessage(b, x) + }, + // repeated string string_table = 6 + func(b *buffer, m message) error { + err := decodeStrings(b, &m.(*Profile).stringTable) + if err != nil { + return err + } + if *&m.(*Profile).stringTable[0] != "" { + return errors.New("string_table[0] must be ''") + } + return nil + }, + // int64 drop_frames = 7 + func(b *buffer, m message) error { return decodeInt64(b, &m.(*Profile).dropFramesX) }, + // int64 keep_frames = 8 + func(b *buffer, m message) error { return decodeInt64(b, &m.(*Profile).keepFramesX) }, + // int64 time_nanos = 9 + func(b *buffer, m message) error { return decodeInt64(b, &m.(*Profile).TimeNanos) }, + // int64 duration_nanos = 10 + func(b *buffer, m message) error { return decodeInt64(b, &m.(*Profile).DurationNanos) }, + // ValueType period_type = 11 + func(b *buffer, m message) error { + x := new(ValueType) + pp := m.(*Profile) + pp.PeriodType = x + return decodeMessage(b, x) + }, + // int64 period = 12 + func(b *buffer, m message) error { return decodeInt64(b, &m.(*Profile).Period) }, + // repeated int64 comment = 13 + func(b *buffer, m message) error { return decodeInt64s(b, &m.(*Profile).commentX) }, +} + +// postDecode takes the unexported fields populated by decode (with +// suffix X) and populates the corresponding exported fields. +// The unexported fields are cleared up to facilitate testing. 
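+// It also resolves mapping, function and location IDs into direct pointers so
+// that callers can navigate the profile without consulting ID tables.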
+func (p *Profile) postDecode() error { + var err error + mappings := make(map[uint64]*Mapping) + for _, m := range p.Mapping { + m.File, err = getString(p.stringTable, &m.fileX, err) + m.BuildID, err = getString(p.stringTable, &m.buildIDX, err) + mappings[m.ID] = m + } + + functions := make(map[uint64]*Function) + for _, f := range p.Function { + f.Name, err = getString(p.stringTable, &f.nameX, err) + f.SystemName, err = getString(p.stringTable, &f.systemNameX, err) + f.Filename, err = getString(p.stringTable, &f.filenameX, err) + functions[f.ID] = f + } + + locations := make(map[uint64]*Location) + for _, l := range p.Location { + l.Mapping = mappings[l.mappingIDX] + l.mappingIDX = 0 + for i, ln := range l.Line { + if id := ln.functionIDX; id != 0 { + l.Line[i].Function = functions[id] + if l.Line[i].Function == nil { + return fmt.Errorf("Function ID %d not found", id) + } + l.Line[i].functionIDX = 0 + } + } + locations[l.ID] = l + } + + for _, st := range p.SampleType { + st.Type, err = getString(p.stringTable, &st.typeX, err) + st.Unit, err = getString(p.stringTable, &st.unitX, err) + } + + for _, s := range p.Sample { + labels := make(map[string][]string) + numLabels := make(map[string][]int64) + for _, l := range s.labelX { + var key, value string + key, err = getString(p.stringTable, &l.keyX, err) + if l.strX != 0 { + value, err = getString(p.stringTable, &l.strX, err) + labels[key] = append(labels[key], value) + } else if l.numX != 0 { + numLabels[key] = append(numLabels[key], l.numX) + } + } + if len(labels) > 0 { + s.Label = labels + } + if len(numLabels) > 0 { + s.NumLabel = numLabels + } + s.Location = nil + for _, lid := range s.locationIDX { + s.Location = append(s.Location, locations[lid]) + } + s.locationIDX = nil + } + + p.DropFrames, err = getString(p.stringTable, &p.dropFramesX, err) + p.KeepFrames, err = getString(p.stringTable, &p.keepFramesX, err) + + if pt := p.PeriodType; pt == nil { + p.PeriodType = &ValueType{} + } + + if pt := p.PeriodType; pt != nil { + pt.Type, err = getString(p.stringTable, &pt.typeX, err) + pt.Unit, err = getString(p.stringTable, &pt.unitX, err) + } + + for _, i := range p.commentX { + var c string + c, err = getString(p.stringTable, &i, err) + p.Comments = append(p.Comments, c) + } + + p.commentX = nil + p.stringTable = nil + return err +} + +func (p *ValueType) decoder() []decoder { + return valueTypeDecoder +} + +func (p *ValueType) encode(b *buffer) { + encodeInt64Opt(b, 1, p.typeX) + encodeInt64Opt(b, 2, p.unitX) +} + +var valueTypeDecoder = []decoder{ + nil, // 0 + // optional int64 type = 1 + func(b *buffer, m message) error { return decodeInt64(b, &m.(*ValueType).typeX) }, + // optional int64 unit = 2 + func(b *buffer, m message) error { return decodeInt64(b, &m.(*ValueType).unitX) }, +} + +func (p *Sample) decoder() []decoder { + return sampleDecoder +} + +func (p *Sample) encode(b *buffer) { + encodeUint64s(b, 1, p.locationIDX) + encodeInt64s(b, 2, p.Value) + for _, x := range p.labelX { + encodeMessage(b, 3, x) + } +} + +var sampleDecoder = []decoder{ + nil, // 0 + // repeated uint64 location = 1 + func(b *buffer, m message) error { return decodeUint64s(b, &m.(*Sample).locationIDX) }, + // repeated int64 value = 2 + func(b *buffer, m message) error { return decodeInt64s(b, &m.(*Sample).Value) }, + // repeated Label label = 3 + func(b *buffer, m message) error { + s := m.(*Sample) + n := len(s.labelX) + s.labelX = append(s.labelX, label{}) + return decodeMessage(b, &s.labelX[n]) + }, +} + +func (p label) decoder() []decoder { + 
return labelDecoder +} + +func (p label) encode(b *buffer) { + encodeInt64Opt(b, 1, p.keyX) + encodeInt64Opt(b, 2, p.strX) + encodeInt64Opt(b, 3, p.numX) +} + +var labelDecoder = []decoder{ + nil, // 0 + // optional int64 key = 1 + func(b *buffer, m message) error { return decodeInt64(b, &m.(*label).keyX) }, + // optional int64 str = 2 + func(b *buffer, m message) error { return decodeInt64(b, &m.(*label).strX) }, + // optional int64 num = 3 + func(b *buffer, m message) error { return decodeInt64(b, &m.(*label).numX) }, +} + +func (p *Mapping) decoder() []decoder { + return mappingDecoder +} + +func (p *Mapping) encode(b *buffer) { + encodeUint64Opt(b, 1, p.ID) + encodeUint64Opt(b, 2, p.Start) + encodeUint64Opt(b, 3, p.Limit) + encodeUint64Opt(b, 4, p.Offset) + encodeInt64Opt(b, 5, p.fileX) + encodeInt64Opt(b, 6, p.buildIDX) + encodeBoolOpt(b, 7, p.HasFunctions) + encodeBoolOpt(b, 8, p.HasFilenames) + encodeBoolOpt(b, 9, p.HasLineNumbers) + encodeBoolOpt(b, 10, p.HasInlineFrames) +} + +var mappingDecoder = []decoder{ + nil, // 0 + func(b *buffer, m message) error { return decodeUint64(b, &m.(*Mapping).ID) }, // optional uint64 id = 1 + func(b *buffer, m message) error { return decodeUint64(b, &m.(*Mapping).Start) }, // optional uint64 memory_offset = 2 + func(b *buffer, m message) error { return decodeUint64(b, &m.(*Mapping).Limit) }, // optional uint64 memory_limit = 3 + func(b *buffer, m message) error { return decodeUint64(b, &m.(*Mapping).Offset) }, // optional uint64 file_offset = 4 + func(b *buffer, m message) error { return decodeInt64(b, &m.(*Mapping).fileX) }, // optional int64 filename = 5 + func(b *buffer, m message) error { return decodeInt64(b, &m.(*Mapping).buildIDX) }, // optional int64 build_id = 6 + func(b *buffer, m message) error { return decodeBool(b, &m.(*Mapping).HasFunctions) }, // optional bool has_functions = 7 + func(b *buffer, m message) error { return decodeBool(b, &m.(*Mapping).HasFilenames) }, // optional bool has_filenames = 8 + func(b *buffer, m message) error { return decodeBool(b, &m.(*Mapping).HasLineNumbers) }, // optional bool has_line_numbers = 9 + func(b *buffer, m message) error { return decodeBool(b, &m.(*Mapping).HasInlineFrames) }, // optional bool has_inline_frames = 10 +} + +func (p *Location) decoder() []decoder { + return locationDecoder +} + +func (p *Location) encode(b *buffer) { + encodeUint64Opt(b, 1, p.ID) + encodeUint64Opt(b, 2, p.mappingIDX) + encodeUint64Opt(b, 3, p.Address) + for i := range p.Line { + encodeMessage(b, 4, &p.Line[i]) + } +} + +var locationDecoder = []decoder{ + nil, // 0 + func(b *buffer, m message) error { return decodeUint64(b, &m.(*Location).ID) }, // optional uint64 id = 1; + func(b *buffer, m message) error { return decodeUint64(b, &m.(*Location).mappingIDX) }, // optional uint64 mapping_id = 2; + func(b *buffer, m message) error { return decodeUint64(b, &m.(*Location).Address) }, // optional uint64 address = 3; + func(b *buffer, m message) error { // repeated Line line = 4 + pp := m.(*Location) + n := len(pp.Line) + pp.Line = append(pp.Line, Line{}) + return decodeMessage(b, &pp.Line[n]) + }, +} + +func (p *Line) decoder() []decoder { + return lineDecoder +} + +func (p *Line) encode(b *buffer) { + encodeUint64Opt(b, 1, p.functionIDX) + encodeInt64Opt(b, 2, p.Line) +} + +var lineDecoder = []decoder{ + nil, // 0 + // optional uint64 function_id = 1 + func(b *buffer, m message) error { return decodeUint64(b, &m.(*Line).functionIDX) }, + // optional int64 line = 2 + func(b *buffer, m message) error { return 
decodeInt64(b, &m.(*Line).Line) }, +} + +func (p *Function) decoder() []decoder { + return functionDecoder +} + +func (p *Function) encode(b *buffer) { + encodeUint64Opt(b, 1, p.ID) + encodeInt64Opt(b, 2, p.nameX) + encodeInt64Opt(b, 3, p.systemNameX) + encodeInt64Opt(b, 4, p.filenameX) + encodeInt64Opt(b, 5, p.StartLine) +} + +var functionDecoder = []decoder{ + nil, // 0 + // optional uint64 id = 1 + func(b *buffer, m message) error { return decodeUint64(b, &m.(*Function).ID) }, + // optional int64 function_name = 2 + func(b *buffer, m message) error { return decodeInt64(b, &m.(*Function).nameX) }, + // optional int64 function_system_name = 3 + func(b *buffer, m message) error { return decodeInt64(b, &m.(*Function).systemNameX) }, + // repeated int64 filename = 4 + func(b *buffer, m message) error { return decodeInt64(b, &m.(*Function).filenameX) }, + // optional int64 start_line = 5 + func(b *buffer, m message) error { return decodeInt64(b, &m.(*Function).StartLine) }, +} + +func addString(strings map[string]int, s string) int64 { + i, ok := strings[s] + if !ok { + i = len(strings) + strings[s] = i + } + return int64(i) +} + +func getString(strings []string, strng *int64, err error) (string, error) { + if err != nil { + return "", err + } + s := int(*strng) + if s < 0 || s >= len(strings) { + return "", errMalformed + } + *strng = 0 + return strings[s], nil +} diff --git a/src/profile/filter.go b/src/profile/filter.go new file mode 100644 index 00000000..f79595eb --- /dev/null +++ b/src/profile/filter.go @@ -0,0 +1,171 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package profile + +// Implements methods to filter samples from profiles. + +import "regexp" + +// FilterSamplesByName filters the samples in a profile and only keeps +// samples where at least one frame matches focus but none match ignore. +// Returns true is the corresponding regexp matched at least one sample. +func (p *Profile) FilterSamplesByName(focus, ignore, hide, show *regexp.Regexp) (fm, im, hm, hnm bool) { + focusOrIgnore := make(map[uint64]bool) + hidden := make(map[uint64]bool) + for _, l := range p.Location { + if ignore != nil && l.matchesName(ignore) { + im = true + focusOrIgnore[l.ID] = false + } else if focus == nil || l.matchesName(focus) { + fm = true + focusOrIgnore[l.ID] = true + } + + if hide != nil && l.matchesName(hide) { + hm = true + l.Line = l.unmatchedLines(hide) + if len(l.Line) == 0 { + hidden[l.ID] = true + } + } + if show != nil { + hnm = true + l.Line = l.matchedLines(show) + if len(l.Line) == 0 { + hidden[l.ID] = true + } + } + } + + s := make([]*Sample, 0, len(p.Sample)) + for _, sample := range p.Sample { + if focusedAndNotIgnored(sample.Location, focusOrIgnore) { + if len(hidden) > 0 { + var locs []*Location + for _, loc := range sample.Location { + if !hidden[loc.ID] { + locs = append(locs, loc) + } + } + if len(locs) == 0 { + // Remove sample with no locations (by not adding it to s). 
+ continue + } + sample.Location = locs + } + s = append(s, sample) + } + } + p.Sample = s + + return +} + +// matchesName returns whether the location matches the regular +// expression. It checks any available function names, file names, and +// mapping object filename. +func (loc *Location) matchesName(re *regexp.Regexp) bool { + for _, ln := range loc.Line { + if fn := ln.Function; fn != nil { + if re.MatchString(fn.Name) || re.MatchString(fn.Filename) { + return true + } + } + } + if m := loc.Mapping; m != nil && re.MatchString(m.File) { + return true + } + return false +} + +// unmatchedLines returns the lines in the location that do not match +// the regular expression. +func (loc *Location) unmatchedLines(re *regexp.Regexp) []Line { + if m := loc.Mapping; m != nil && re.MatchString(m.File) { + return nil + } + var lines []Line + for _, ln := range loc.Line { + if fn := ln.Function; fn != nil { + if re.MatchString(fn.Name) || re.MatchString(fn.Filename) { + continue + } + } + lines = append(lines, ln) + } + return lines +} + +// matchedLines returns the lines in the location that match +// the regular expression. +func (loc *Location) matchedLines(re *regexp.Regexp) []Line { + var lines []Line + for _, ln := range loc.Line { + if fn := ln.Function; fn != nil { + if !re.MatchString(fn.Name) && !re.MatchString(fn.Filename) { + continue + } + } + lines = append(lines, ln) + } + return lines +} + +// focusedAndNotIgnored looks up a slice of ids against a map of +// focused/ignored locations. The map only contains locations that are +// explicitly focused or ignored. Returns whether there is at least +// one focused location but no ignored locations. +func focusedAndNotIgnored(locs []*Location, m map[uint64]bool) bool { + var f bool + for _, loc := range locs { + if focus, focusOrIgnore := m[loc.ID]; focusOrIgnore { + if focus { + // Found focused location. Must keep searching in case there + // is an ignored one as well. + f = true + } else { + // Found ignored location. Can return false right away. + return false + } + } + } + return f +} + +// TagMatch selects tags for filtering +type TagMatch func(s *Sample) bool + +// FilterSamplesByTag removes all samples from the profile, except +// those that match focus and do not match the ignore regular +// expression. +func (p *Profile) FilterSamplesByTag(focus, ignore TagMatch) (fm, im bool) { + samples := make([]*Sample, 0, len(p.Sample)) + for _, s := range p.Sample { + focused, ignored := true, false + if focus != nil { + focused = focus(s) + } + if ignore != nil { + ignored = ignore(s) + } + fm = fm || focused + im = im || ignored + if focused && !ignored { + samples = append(samples, s) + } + } + p.Sample = samples + return +} diff --git a/src/profile/legacy_java_profile.go b/src/profile/legacy_java_profile.go new file mode 100644 index 00000000..db2ee567 --- /dev/null +++ b/src/profile/legacy_java_profile.go @@ -0,0 +1,308 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
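Before the java legacy parser, a short sketch of how the filters defined above are typically applied; the regular expressions and helper name are illustrative assumptions, not part of this change.

    package driverexample // hypothetical package, for illustration only

    import (
        "regexp"

        "profile"
    )

    // focusOnMalloc keeps samples whose stacks mention malloc but not free and
    // hides runtime frames from the stacks that remain. It reports whether the
    // focus expression matched any sample.
    func focusOnMalloc(p *profile.Profile) bool {
        focus := regexp.MustCompile(`malloc`)
        ignore := regexp.MustCompile(`free`)
        hide := regexp.MustCompile(`^runtime\.`)
        // A nil show expression keeps every frame that survives hide.
        fm, _, _, _ := p.FilterSamplesByName(focus, ignore, hide, nil)
        return fm
    }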
+ +// This file implements parsers to convert java legacy profiles into +// the profile.proto format. + +package profile + +import ( + "bytes" + "fmt" + "io" + "path/filepath" + "regexp" + "strconv" + "strings" +) + +var ( + attributeRx = regexp.MustCompile(`([\w ]+)=([\w ]+)`) + javaSampleRx = regexp.MustCompile(` *(\d+) +(\d+) +@ +([ x0-9a-f]*)`) + javaLocationRx = regexp.MustCompile(`^\s*0x([[:xdigit:]]+)\s+(.*)\s*$`) + javaLocationFileLineRx = regexp.MustCompile(`^(.*)\s+\((.+):(-?[[:digit:]]+)\)$`) + javaLocationPathRx = regexp.MustCompile(`^(.*)\s+\((.*)\)$`) +) + +// javaCPUProfile returns a new Profile from profilez data. +// b is the profile bytes after the header, period is the profiling +// period, and parse is a function to parse 8-byte chunks from the +// profile in its native endianness. +func javaCPUProfile(b []byte, period int64, parse func(b []byte) (uint64, []byte)) (*Profile, error) { + p := &Profile{ + Period: period * 1000, + PeriodType: &ValueType{Type: "cpu", Unit: "nanoseconds"}, + SampleType: []*ValueType{{Type: "samples", Unit: "count"}, {Type: "cpu", Unit: "nanoseconds"}}, + } + var err error + var locs map[uint64]*Location + if b, locs, err = parseCPUSamples(b, parse, false, p); err != nil { + return nil, err + } + + if err = parseJavaLocations(b, locs, p); err != nil { + return nil, err + } + + // Strip out addresses for better merge. + if err = p.Aggregate(true, true, true, true, false); err != nil { + return nil, err + } + + return p, nil +} + +// parseJavaProfile returns a new profile from heapz or contentionz +// data. b is the profile bytes after the header. +func parseJavaProfile(b []byte) (*Profile, error) { + h := bytes.SplitAfterN(b, []byte("\n"), 2) + if len(h) < 2 { + return nil, errUnrecognized + } + + p := &Profile{ + PeriodType: &ValueType{}, + } + header := string(bytes.TrimSpace(h[0])) + + var err error + var pType string + switch header { + case "--- heapz 1 ---": + pType = "heap" + case "--- contentionz 1 ---": + pType = "contention" + default: + return nil, errUnrecognized + } + + if b, err = parseJavaHeader(pType, h[1], p); err != nil { + return nil, err + } + var locs map[uint64]*Location + if b, locs, err = parseJavaSamples(pType, b, p); err != nil { + return nil, err + } + if err = parseJavaLocations(b, locs, p); err != nil { + return nil, err + } + + // Strip out addresses for better merge. + if err = p.Aggregate(true, true, true, true, false); err != nil { + return nil, err + } + + return p, nil +} + +// parseJavaHeader parses the attribute section on a java profile and +// populates a profile. Returns the remainder of the buffer after all +// attributes. +func parseJavaHeader(pType string, b []byte, p *Profile) ([]byte, error) { + nextNewLine := bytes.IndexByte(b, byte('\n')) + for nextNewLine != -1 { + line := string(bytes.TrimSpace(b[0:nextNewLine])) + if line != "" { + h := attributeRx.FindStringSubmatch(line) + if h == nil { + // Not a valid attribute, exit. 
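+				// Return the remaining buffer, still starting at this line, so the
+				// caller can parse it as sample data.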
+ return b, nil + } + + attribute, value := strings.TrimSpace(h[1]), strings.TrimSpace(h[2]) + var err error + switch pType + "/" + attribute { + case "heap/format", "cpu/format", "contention/format": + if value != "java" { + return nil, errUnrecognized + } + case "heap/resolution": + p.SampleType = []*ValueType{ + {Type: "inuse_objects", Unit: "count"}, + {Type: "inuse_space", Unit: value}, + } + case "contention/resolution": + p.SampleType = []*ValueType{ + {Type: "contentions", Unit: value}, + {Type: "delay", Unit: value}, + } + case "contention/sampling period": + p.PeriodType = &ValueType{ + Type: "contentions", Unit: "count", + } + if p.Period, err = strconv.ParseInt(value, 0, 64); err != nil { + return nil, fmt.Errorf("failed to parse attribute %s: %v", line, err) + } + case "contention/ms since reset": + millis, err := strconv.ParseInt(value, 0, 64) + if err != nil { + return nil, fmt.Errorf("failed to parse attribute %s: %v", line, err) + } + p.DurationNanos = millis * 1000 * 1000 + default: + return nil, errUnrecognized + } + } + // Grab next line. + b = b[nextNewLine+1:] + nextNewLine = bytes.IndexByte(b, byte('\n')) + } + return b, nil +} + +// parseJavaSamples parses the samples from a java profile and +// populates the Samples in a profile. Returns the remainder of the +// buffer after the samples. +func parseJavaSamples(pType string, b []byte, p *Profile) ([]byte, map[uint64]*Location, error) { + nextNewLine := bytes.IndexByte(b, byte('\n')) + locs := make(map[uint64]*Location) + for nextNewLine != -1 { + line := string(bytes.TrimSpace(b[0:nextNewLine])) + if line != "" { + sample := javaSampleRx.FindStringSubmatch(line) + if sample == nil { + // Not a valid sample, exit. + return b, locs, nil + } + + // Java profiles have data/fields inverted compared to other + // profile types. + value1, value2, addrs := sample[2], sample[1], sample[3] + + var sloc []*Location + for _, addr := range parseHexAddresses(addrs) { + loc := locs[addr] + if locs[addr] == nil { + loc = &Location{ + Address: addr, + } + p.Location = append(p.Location, loc) + locs[addr] = loc + } + sloc = append(sloc, loc) + } + s := &Sample{ + Value: make([]int64, 2), + Location: sloc, + } + + var err error + if s.Value[0], err = strconv.ParseInt(value1, 0, 64); err != nil { + return nil, nil, fmt.Errorf("parsing sample %s: %v", line, err) + } + if s.Value[1], err = strconv.ParseInt(value2, 0, 64); err != nil { + return nil, nil, fmt.Errorf("parsing sample %s: %v", line, err) + } + + switch pType { + case "heap": + const javaHeapzSamplingRate = 524288 // 512K + s.NumLabel = map[string][]int64{"bytes": []int64{s.Value[1] / s.Value[0]}} + s.Value[0], s.Value[1] = scaleHeapSample(s.Value[0], s.Value[1], javaHeapzSamplingRate) + case "contention": + if period := p.Period; period != 0 { + s.Value[0] = s.Value[0] * p.Period + s.Value[1] = s.Value[1] * p.Period + } + } + p.Sample = append(p.Sample, s) + } + // Grab next line. + b = b[nextNewLine+1:] + nextNewLine = bytes.IndexByte(b, byte('\n')) + } + return b, locs, nil +} + +// parseJavaLocations parses the location information in a java +// profile and populates the Locations in a profile. It uses the +// location addresses from the profile as both the ID of each +// location. 
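+// Location lines look like "0x0000000a com.example.Foo.run (Foo.java:42)";
+// when there is no file:line, the parentheses carry a shared-library path
+// instead, and bare text is kept whole as the function name.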
+func parseJavaLocations(b []byte, locs map[uint64]*Location, p *Profile) error { + r := bytes.NewBuffer(b) + fns := make(map[string]*Function) + for { + line, err := r.ReadString('\n') + if err != nil { + if err != io.EOF { + return err + } + if line == "" { + break + } + } + + if line = strings.TrimSpace(line); line == "" { + continue + } + + jloc := javaLocationRx.FindStringSubmatch(line) + if len(jloc) != 3 { + continue + } + addr, err := strconv.ParseUint(jloc[1], 16, 64) + if err != nil { + return fmt.Errorf("parsing sample %s: %v", line, err) + } + loc := locs[addr] + if loc == nil { + // Unused/unseen + continue + } + var lineFunc, lineFile string + var lineNo int64 + + if fileLine := javaLocationFileLineRx.FindStringSubmatch(jloc[2]); len(fileLine) == 4 { + // Found a line of the form: "function (file:line)" + lineFunc, lineFile = fileLine[1], fileLine[2] + if n, err := strconv.ParseInt(fileLine[3], 10, 64); err == nil && n > 0 { + lineNo = n + } + } else if filePath := javaLocationPathRx.FindStringSubmatch(jloc[2]); len(filePath) == 3 { + // If there's not a file:line, it's a shared library path. + // The path isn't interesting, so just give the .so. + lineFunc, lineFile = filePath[1], filepath.Base(filePath[2]) + } else if strings.Contains(jloc[2], "generated stub/JIT") { + lineFunc = "STUB" + } else { + // Treat whole line as the function name. This is used by the + // java agent for internal states such as "GC" or "VM". + lineFunc = jloc[2] + } + fn := fns[lineFunc] + + if fn == nil { + fn = &Function{ + Name: lineFunc, + SystemName: lineFunc, + Filename: lineFile, + } + fns[lineFunc] = fn + p.Function = append(p.Function, fn) + } + loc.Line = []Line{ + { + Function: fn, + Line: lineNo, + }, + } + loc.Address = 0 + } + + p.remapLocationIDs() + p.remapFunctionIDs() + p.remapMappingIDs() + + return nil +} diff --git a/src/profile/legacy_profile.go b/src/profile/legacy_profile.go new file mode 100644 index 00000000..17c22241 --- /dev/null +++ b/src/profile/legacy_profile.go @@ -0,0 +1,1255 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// This file implements parsers to convert legacy profiles into the +// profile.proto format. 
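For orientation, the java heapz text format consumed by the parsers above looks roughly like this. The header attributes, counters, addresses and names are synthetic, chosen only to make the regular expressions (attributeRx, javaSampleRx, javaLocationRx) concrete: an attribute header, sample lines of two counters followed by '@' and a stack of hex addresses, then one location line per address.

    --- heapz 1 ---
    format = java
    resolution = bytes
      4096 2 @ 0x00000001 0x00000002
    0x00000001 com.example.Cache.fill (Cache.java:42)
    0x00000002 com.example.Main.main (Main.java:10)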
+ +package profile + +import ( + "bufio" + "bytes" + "fmt" + "io" + "math" + "regexp" + "strconv" + "strings" +) + +var ( + countStartRE = regexp.MustCompile(`\A(\w+) profile: total \d+\n\z`) + countRE = regexp.MustCompile(`\A(\d+) @(( 0x[0-9a-f]+)+)\n\z`) + + heapHeaderRE = regexp.MustCompile(`heap profile: *(\d+): *(\d+) *\[ *(\d+): *(\d+) *\] *@ *(heap[_a-z0-9]*)/?(\d*)`) + heapSampleRE = regexp.MustCompile(`(-?\d+): *(-?\d+) *\[ *(\d+): *(\d+) *] @([ x0-9a-f]*)`) + + contentionSampleRE = regexp.MustCompile(`(\d+) *(\d+) @([ x0-9a-f]*)`) + + hexNumberRE = regexp.MustCompile(`0x[0-9a-f]+`) + + growthHeaderRE = regexp.MustCompile(`heap profile: *(\d+): *(\d+) *\[ *(\d+): *(\d+) *\] @ growthz`) + + fragmentationHeaderRE = regexp.MustCompile(`heap profile: *(\d+): *(\d+) *\[ *(\d+): *(\d+) *\] @ fragmentationz`) + + threadzStartRE = regexp.MustCompile(`--- threadz \d+ ---`) + threadStartRE = regexp.MustCompile(`--- Thread ([[:xdigit:]]+) \(name: (.*)/(\d+)\) stack: ---`) + + procMapsRE = regexp.MustCompile(`([[:xdigit:]]+)-([[:xdigit:]]+)\s+([-rwxp]+)\s+([[:xdigit:]]+)\s+([[:xdigit:]]+):([[:xdigit:]]+)\s+([[:digit:]]+)\s*(\S+)?`) + + briefMapsRE = regexp.MustCompile(`\s*([[:xdigit:]]+)-([[:xdigit:]]+):\s*(\S+)(\s.*@)?([[:xdigit:]]+)?`) +) + +func isSpaceOrComment(line string) bool { + trimmed := strings.TrimSpace(line) + return len(trimmed) == 0 || trimmed[0] == '#' +} + +// parseGoCount parses a Go count profile (e.g., threadcreate or +// goroutine) and returns a new Profile. +func parseGoCount(b []byte) (*Profile, error) { + r := bytes.NewBuffer(b) + + var line string + var err error + for { + // Skip past comments and empty lines seeking a real header. + line, err = r.ReadString('\n') + if err != nil { + return nil, err + } + if !isSpaceOrComment(line) { + break + } + } + + m := countStartRE.FindStringSubmatch(line) + if m == nil { + return nil, errUnrecognized + } + profileType := string(m[1]) + p := &Profile{ + PeriodType: &ValueType{Type: profileType, Unit: "count"}, + Period: 1, + SampleType: []*ValueType{{Type: profileType, Unit: "count"}}, + } + locations := make(map[uint64]*Location) + for { + line, err = r.ReadString('\n') + if err != nil { + if err == io.EOF { + break + } + return nil, err + } + if isSpaceOrComment(line) { + continue + } + if strings.HasPrefix(line, "---") { + break + } + m := countRE.FindStringSubmatch(line) + if m == nil { + return nil, errMalformed + } + n, err := strconv.ParseInt(string(m[1]), 0, 64) + if err != nil { + return nil, errMalformed + } + fields := strings.Fields(string(m[2])) + locs := make([]*Location, 0, len(fields)) + for _, stk := range fields { + addr, err := strconv.ParseUint(stk, 0, 64) + if err != nil { + return nil, errMalformed + } + // Adjust all frames by -1 (except the leaf) to land on top of + // the call instruction. + if len(locs) > 0 { + addr-- + } + loc := locations[addr] + if loc == nil { + loc = &Location{ + Address: addr, + } + locations[addr] = loc + p.Location = append(p.Location, loc) + } + locs = append(locs, loc) + } + p.Sample = append(p.Sample, &Sample{ + Location: locs, + Value: []int64{n}, + }) + } + + if err = parseAdditionalSections(strings.TrimSpace(line), r, p); err != nil { + return nil, err + } + return p, nil +} + +// remapLocationIDs ensures there is a location for each address +// referenced by a sample, and remaps the samples to point to the new +// location ids. 
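+// Locations that are not referenced by any sample are dropped in the process.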
+func (p *Profile) remapLocationIDs() { + seen := make(map[*Location]bool, len(p.Location)) + var locs []*Location + + for _, s := range p.Sample { + for _, l := range s.Location { + if seen[l] { + continue + } + l.ID = uint64(len(locs) + 1) + locs = append(locs, l) + seen[l] = true + } + } + p.Location = locs +} + +func (p *Profile) remapFunctionIDs() { + seen := make(map[*Function]bool, len(p.Function)) + var fns []*Function + + for _, l := range p.Location { + for _, ln := range l.Line { + fn := ln.Function + if fn == nil || seen[fn] { + continue + } + fn.ID = uint64(len(fns) + 1) + fns = append(fns, fn) + seen[fn] = true + } + } + p.Function = fns +} + +// remapMappingIDs matches location addresses with existing mappings +// and updates them appropriately. This is O(N*M), if this ever shows +// up as a bottleneck, evaluate sorting the mappings and doing a +// binary search, which would make it O(N*log(M)). +func (p *Profile) remapMappingIDs() { + // Some profile handlers will incorrectly set regions for the main + // executable if its section is remapped. Fix them through heuristics. + + if len(p.Mapping) > 0 { + // Remove the initial mapping if named '/anon_hugepage' and has a + // consecutive adjacent mapping. + if m := p.Mapping[0]; strings.HasPrefix(m.File, "/anon_hugepage") { + if len(p.Mapping) > 1 && m.Limit == p.Mapping[1].Start { + p.Mapping = p.Mapping[1:] + } + } + } + + // Subtract the offset from the start of the main mapping if it + // ends up at a recognizable start address. + if len(p.Mapping) > 0 { + const expectedStart = 0x400000 + if m := p.Mapping[0]; m.Start-m.Offset == expectedStart { + m.Start = expectedStart + m.Offset = 0 + } + } + + // Associate each location with an address to the corresponding + // mapping. Create fake mapping if a suitable one isn't found. + var fake *Mapping +nextLocation: + for _, l := range p.Location { + a := l.Address + if l.Mapping != nil || a == 0 { + continue + } + for _, m := range p.Mapping { + if m.Start <= a && a < m.Limit { + l.Mapping = m + continue nextLocation + } + } + // Work around legacy handlers failing to encode the first + // part of mappings split into adjacent ranges. + for _, m := range p.Mapping { + if m.Offset != 0 && m.Start-m.Offset <= a && a < m.Start { + m.Start -= m.Offset + m.Offset = 0 + l.Mapping = m + continue nextLocation + } + } + // If there is still no mapping, create a fake one. + // This is important for the Go legacy handler, which produced + // no mappings. + if fake == nil { + fake = &Mapping{ + ID: 1, + Limit: ^uint64(0), + } + p.Mapping = append(p.Mapping, fake) + } + l.Mapping = fake + } + + // Reset all mapping IDs. 
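+	// Renumber sequentially so the IDs stay dense and 1-based after a mapping
+	// may have been dropped or a fake one appended above.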
+ for i, m := range p.Mapping { + m.ID = uint64(i + 1) + } +} + +var cpuInts = []func([]byte) (uint64, []byte){ + get32l, + get32b, + get64l, + get64b, +} + +func get32l(b []byte) (uint64, []byte) { + if len(b) < 4 { + return 0, nil + } + return uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24, b[4:] +} + +func get32b(b []byte) (uint64, []byte) { + if len(b) < 4 { + return 0, nil + } + return uint64(b[3]) | uint64(b[2])<<8 | uint64(b[1])<<16 | uint64(b[0])<<24, b[4:] +} + +func get64l(b []byte) (uint64, []byte) { + if len(b) < 8 { + return 0, nil + } + return uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56, b[8:] +} + +func get64b(b []byte) (uint64, []byte) { + if len(b) < 8 { + return 0, nil + } + return uint64(b[7]) | uint64(b[6])<<8 | uint64(b[5])<<16 | uint64(b[4])<<24 | uint64(b[3])<<32 | uint64(b[2])<<40 | uint64(b[1])<<48 | uint64(b[0])<<56, b[8:] +} + +// parseCPU parses a profilez legacy profile and returns a newly +// populated Profile. +// +// The general format for profilez samples is a sequence of words in +// binary format. The first words are a header with the following data: +// 1st word -- 0 +// 2nd word -- 3 +// 3rd word -- 0 if a c++ application, 1 if a java application. +// 4th word -- Sampling period (in microseconds). +// 5th word -- Padding. +func parseCPU(b []byte) (*Profile, error) { + var parse func([]byte) (uint64, []byte) + var n1, n2, n3, n4, n5 uint64 + for _, parse = range cpuInts { + var tmp []byte + n1, tmp = parse(b) + n2, tmp = parse(tmp) + n3, tmp = parse(tmp) + n4, tmp = parse(tmp) + n5, tmp = parse(tmp) + + if tmp != nil && n1 == 0 && n2 == 3 && n3 == 0 && n4 > 0 && n5 == 0 { + b = tmp + return cpuProfile(b, int64(n4), parse) + } + if tmp != nil && n1 == 0 && n2 == 3 && n3 == 1 && n4 > 0 && n5 == 0 { + b = tmp + return javaCPUProfile(b, int64(n4), parse) + } + } + return nil, errUnrecognized +} + +// cpuProfile returns a new Profile from C++ profilez data. +// b is the profile bytes after the header, period is the profiling +// period, and parse is a function to parse 8-byte chunks from the +// profile in its native endianness. +func cpuProfile(b []byte, period int64, parse func(b []byte) (uint64, []byte)) (*Profile, error) { + p := &Profile{ + Period: period * 1000, + PeriodType: &ValueType{Type: "cpu", Unit: "nanoseconds"}, + SampleType: []*ValueType{ + {Type: "samples", Unit: "count"}, + {Type: "cpu", Unit: "nanoseconds"}, + }, + } + var err error + if b, _, err = parseCPUSamples(b, parse, true, p); err != nil { + return nil, err + } + + // If *most* samples have the same second-to-the-bottom frame, it + // strongly suggests that it is an uninteresting artifact of + // measurement -- a stack frame pushed by the signal handler. The + // bottom frame is always correct as it is picked up from the signal + // structure, not the stack. Check if this is the case and if so, + // remove. + + // Remove up to two frames. + maxiter := 2 + // Allow one different sample for this many samples with the same + // second-to-last frame. 
+ similarSamples := 32 + margin := len(p.Sample) / similarSamples + + for iter := 0; iter < maxiter; iter++ { + addr1 := make(map[uint64]int) + for _, s := range p.Sample { + if len(s.Location) > 1 { + a := s.Location[1].Address + addr1[a] = addr1[a] + 1 + } + } + + for id1, count := range addr1 { + if count >= len(p.Sample)-margin { + // Found uninteresting frame, strip it out from all samples + for _, s := range p.Sample { + if len(s.Location) > 1 && s.Location[1].Address == id1 { + s.Location = append(s.Location[:1], s.Location[2:]...) + } + } + break + } + } + } + + // The profile handler may duplicate the leaf frame, because it gets + // its address both from stack unwinding and from the signal + // context. Detect this and delete the duplicate, which has been + // adjusted by -1. The leaf address should not be adjusted as it is + // not a call. + for _, s := range p.Sample { + if len(s.Location) > 1 && s.Location[0].Address == s.Location[1].Address+1 { + s.Location = append(s.Location[:1], s.Location[2:]...) + } + } + + if err := p.ParseMemoryMap(bytes.NewBuffer(b)); err != nil { + return nil, err + } + return p, nil +} + +// parseCPUSamples parses a collection of profilez samples from a +// profile. +// +// profilez samples are a repeated sequence of stack frames of the +// form: +// 1st word -- The number of times this stack was encountered. +// 2nd word -- The size of the stack (StackSize). +// 3rd word -- The first address on the stack. +// ... +// StackSize + 2 -- The last address on the stack +// The last stack trace is of the form: +// 1st word -- 0 +// 2nd word -- 1 +// 3rd word -- 0 +// +// Addresses from stack traces may point to the next instruction after +// each call. Optionally adjust by -1 to land somewhere on the actual +// call (except for the leaf, which is not a call). +func parseCPUSamples(b []byte, parse func(b []byte) (uint64, []byte), adjust bool, p *Profile) ([]byte, map[uint64]*Location, error) { + locs := make(map[uint64]*Location) + for len(b) > 0 { + var count, nstk uint64 + count, b = parse(b) + nstk, b = parse(b) + if b == nil || nstk > uint64(len(b)/4) { + return nil, nil, errUnrecognized + } + var sloc []*Location + addrs := make([]uint64, nstk) + for i := 0; i < int(nstk); i++ { + addrs[i], b = parse(b) + } + + if count == 0 && nstk == 1 && addrs[0] == 0 { + // End of data marker + break + } + for i, addr := range addrs { + if adjust && i > 0 { + addr-- + } + loc := locs[addr] + if loc == nil { + loc = &Location{ + Address: addr, + } + locs[addr] = loc + p.Location = append(p.Location, loc) + } + sloc = append(sloc, loc) + } + p.Sample = append(p.Sample, + &Sample{ + Value: []int64{int64(count), int64(count) * int64(p.Period)}, + Location: sloc, + }) + } + // Reached the end without finding the EOD marker. + return b, locs, nil +} + +// parseHeap parses a heapz legacy or a growthz profile and +// returns a newly populated Profile. 
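+// A typical heapz header looks like
+// "heap profile: 5: 1024 [10: 2048] @ heap_v2/524288" (values illustrative;
+// see heapHeaderRE).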
+func parseHeap(b []byte) (p *Profile, err error) { + r := bytes.NewBuffer(b) + l, err := r.ReadString('\n') + if err != nil { + return nil, errUnrecognized + } + + p = &Profile{} + + sampling := "" + hasAlloc := false + + p.PeriodType = &ValueType{Type: "space", Unit: "bytes"} + if header := heapHeaderRE.FindStringSubmatch(l); header != nil { + sampling, p.Period, hasAlloc, err = parseHeapHeader(l) + if err != nil { + return nil, err + } + } else if header = growthHeaderRE.FindStringSubmatch(l); header != nil { + p.Period = 1 + } else if header = fragmentationHeaderRE.FindStringSubmatch(l); header != nil { + p.Period = 1 + } else { + return nil, errUnrecognized + } + + if hasAlloc { + // Put alloc before inuse so that default pprof selection + // will prefer inuse_space. + p.SampleType = []*ValueType{ + {Type: "alloc_objects", Unit: "count"}, + {Type: "alloc_space", Unit: "bytes"}, + {Type: "inuse_objects", Unit: "count"}, + {Type: "inuse_space", Unit: "bytes"}, + } + } else { + p.SampleType = []*ValueType{ + {Type: "objects", Unit: "count"}, + {Type: "space", Unit: "bytes"}, + } + } + + locs := make(map[uint64]*Location) + for { + l, err = r.ReadString('\n') + if err != nil { + if err != io.EOF { + return nil, err + } + + if l == "" { + break + } + } + + if isSpaceOrComment(l) { + continue + } + l = strings.TrimSpace(l) + + if sectionTrigger(l) != unrecognizedSection { + break + } + + value, blocksize, addrs, err := parseHeapSample(l, p.Period, sampling, hasAlloc) + if err != nil { + return nil, err + } + + var sloc []*Location + for i, addr := range addrs { + // Addresses from stack traces point to the next instruction after + // each call. Adjust by -1 to land somewhere on the actual call + // (except for the leaf, which is not a call). + if i > 0 { + addr-- + } + loc := locs[addr] + if locs[addr] == nil { + loc = &Location{ + Address: addr, + } + p.Location = append(p.Location, loc) + locs[addr] = loc + } + sloc = append(sloc, loc) + } + + p.Sample = append(p.Sample, &Sample{ + Value: value, + Location: sloc, + NumLabel: map[string][]int64{"bytes": []int64{blocksize}}, + }) + } + + if err = parseAdditionalSections(l, r, p); err != nil { + return nil, err + } + return p, nil +} + +func parseHeapHeader(line string) (sampling string, period int64, hasAlloc bool, err error) { + header := heapHeaderRE.FindStringSubmatch(line) + if header == nil { + return "", 0, false, errUnrecognized + } + + if len(header[6]) > 0 { + if period, err = strconv.ParseInt(string(header[6]), 10, 64); err != nil { + return "", 0, false, errUnrecognized + } + } + + if (header[3] != header[1] && header[3] != "0") || (header[4] != header[2] && header[4] != "0") { + hasAlloc = true + } + + switch header[5] { + case "heapz_v2", "heap_v2": + return "v2", period, hasAlloc, nil + case "heapprofile": + return "", 1, hasAlloc, nil + case "heap": + return "v2", period / 2, hasAlloc, nil + default: + return "", 0, false, errUnrecognized + } +} + +// parseHeapSample parses a single row from a heap profile into a new Sample. +func parseHeapSample(line string, rate int64, sampling string, includeAlloc bool) (value []int64, blocksize int64, addrs []uint64, err error) { + sampleData := heapSampleRE.FindStringSubmatch(line) + if len(sampleData) != 6 { + return nil, 0, nil, fmt.Errorf("unexpected number of sample values: got %d, want 6", len(sampleData)) + } + + // This is a local-scoped helper function to avoid needing to pass + // around rate, sampling and many return parameters. 
+ addValues := func(countString, sizeString string, label string) error { + count, err := strconv.ParseInt(countString, 10, 64) + if err != nil { + return fmt.Errorf("malformed sample: %s: %v", line, err) + } + size, err := strconv.ParseInt(sizeString, 10, 64) + if err != nil { + return fmt.Errorf("malformed sample: %s: %v", line, err) + } + if count == 0 && size != 0 { + return fmt.Errorf("%s count was 0 but %s bytes was %d", label, label, size) + } + if count != 0 { + blocksize = size / count + if sampling == "v2" { + count, size = scaleHeapSample(count, size, rate) + } + } + value = append(value, count, size) + return nil + } + + if includeAlloc { + if err := addValues(sampleData[3], sampleData[4], "allocation"); err != nil { + return nil, 0, nil, err + } + } + + if err := addValues(sampleData[1], sampleData[2], "inuse"); err != nil { + return nil, 0, nil, err + } + + addrs = parseHexAddresses(sampleData[5]) + + return value, blocksize, addrs, nil +} + +// extractHexAddresses extracts hex numbers from a string and returns +// them, together with their numeric value, in a slice. +func extractHexAddresses(s string) ([]string, []uint64) { + hexStrings := hexNumberRE.FindAllString(s, -1) + var ids []uint64 + for _, s := range hexStrings { + if id, err := strconv.ParseUint(s, 0, 64); err == nil { + ids = append(ids, id) + } else { + // Do not expect any parsing failures due to the regexp matching. + panic("failed to parse hex value:" + s) + } + } + return hexStrings, ids +} + +// parseHexAddresses parses hex numbers from a string and returns them +// in a slice. +func parseHexAddresses(s string) []uint64 { + _, ids := extractHexAddresses(s) + return ids +} + +// scaleHeapSample adjusts the data from a heapz Sample to +// account for its probability of appearing in the collected +// data. heapz profiles are a sampling of the memory allocations +// requests in a program. We estimate the unsampled value by dividing +// each collected sample by its probability of appearing in the +// profile. heapz v2 profiles rely on a poisson process to determine +// which samples to collect, based on the desired average collection +// rate R. The probability of a sample of size S to appear in that +// profile is 1-exp(-S/R). +func scaleHeapSample(count, size, rate int64) (int64, int64) { + if count == 0 || size == 0 { + return 0, 0 + } + + if rate <= 1 { + // if rate==1 all samples were collected so no adjustment is needed. + // if rate<1 treat as unknown and skip scaling. + return count, size + } + + avgSize := float64(size) / float64(count) + scale := 1 / (1 - math.Exp(-avgSize/float64(rate))) + + return int64(float64(count) * scale), int64(float64(size) * scale) +} + +// parseContention parses a contentionz profile and returns a newly +// populated Profile. +func parseContention(b []byte) (p *Profile, err error) { + r := bytes.NewBuffer(b) + l, err := r.ReadString('\n') + if err != nil { + return nil, errUnrecognized + } + + if !strings.HasPrefix(l, "--- contention") { + return nil, errUnrecognized + } + + p = &Profile{ + PeriodType: &ValueType{Type: "contentions", Unit: "count"}, + Period: 1, + SampleType: []*ValueType{ + {Type: "contentions", Unit: "count"}, + {Type: "delay", Unit: "nanoseconds"}, + }, + } + + var cpuHz int64 + // Parse text of the form "attribute = value" before the samples. 
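+	// Typical header attributes look like the following (values purely
+	// illustrative):
+	//
+	//	cycles/second = 2400000000
+	//	sampling period = 1
+	//	ms since reset = 120000
+	//	discarded samples = 0
+	//
+	// "cycles/second" is later used by parseContentionSample to convert
+	// sampled delay cycles into nanoseconds.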
+ const delimiter = "=" + for { + l, err = r.ReadString('\n') + if err != nil { + if err != io.EOF { + return nil, err + } + + if l == "" { + break + } + } + + if l = strings.TrimSpace(l); l == "" { + continue + } + + if strings.HasPrefix(l, "---") { + break + } + + attr := strings.SplitN(l, delimiter, 2) + if len(attr) != 2 { + break + } + key, val := strings.TrimSpace(attr[0]), strings.TrimSpace(attr[1]) + var err error + switch key { + case "cycles/second": + if cpuHz, err = strconv.ParseInt(val, 0, 64); err != nil { + return nil, errUnrecognized + } + case "sampling period": + if p.Period, err = strconv.ParseInt(val, 0, 64); err != nil { + return nil, errUnrecognized + } + case "ms since reset": + ms, err := strconv.ParseInt(val, 0, 64) + if err != nil { + return nil, errUnrecognized + } + p.DurationNanos = ms * 1000 * 1000 + case "format": + // CPP contentionz profiles don't have format. + return nil, errUnrecognized + case "resolution": + // CPP contentionz profiles don't have resolution. + return nil, errUnrecognized + case "discarded samples": + default: + return nil, errUnrecognized + } + } + + locs := make(map[uint64]*Location) + for { + if l = strings.TrimSpace(l); strings.HasPrefix(l, "---") { + break + } + value, addrs, err := parseContentionSample(l, p.Period, cpuHz) + if err != nil { + return nil, err + } + var sloc []*Location + for i, addr := range addrs { + // Addresses from stack traces point to the next instruction after + // each call. Adjust by -1 to land somewhere on the actual call + // (except for the leaf, which is not a call). + if i > 0 { + addr-- + } + loc := locs[addr] + if locs[addr] == nil { + loc = &Location{ + Address: addr, + } + p.Location = append(p.Location, loc) + locs[addr] = loc + } + sloc = append(sloc, loc) + } + p.Sample = append(p.Sample, &Sample{ + Value: value, + Location: sloc, + }) + + if l, err = r.ReadString('\n'); err != nil { + if err != io.EOF { + return nil, err + } + if l == "" { + break + } + } + } + + if err = parseAdditionalSections(l, r, p); err != nil { + return nil, err + } + + return p, nil +} + +// parseContentionSample parses a single row from a contention profile +// into a new Sample. +func parseContentionSample(line string, period, cpuHz int64) (value []int64, addrs []uint64, err error) { + sampleData := contentionSampleRE.FindStringSubmatch(line) + if sampleData == nil { + return value, addrs, errUnrecognized + } + + v1, err := strconv.ParseInt(sampleData[1], 10, 64) + if err != nil { + return value, addrs, fmt.Errorf("malformed sample: %s: %v", line, err) + } + v2, err := strconv.ParseInt(sampleData[2], 10, 64) + if err != nil { + return value, addrs, fmt.Errorf("malformed sample: %s: %v", line, err) + } + + // Unsample values if period and cpuHz are available. + // - Delays are scaled to cycles and then to nanoseconds. + // - Contentions are scaled to cycles. + if period > 0 { + if cpuHz > 0 { + cpuGHz := float64(cpuHz) / 1e9 + v1 = int64(float64(v1) * float64(period) / cpuGHz) + } + v2 = v2 * period + } + + value = []int64{v2, v1} + addrs = parseHexAddresses(sampleData[3]) + + return value, addrs, nil +} + +// parseThread parses a Threadz profile and returns a new Profile. +func parseThread(b []byte) (*Profile, error) { + r := bytes.NewBuffer(b) + + var line string + var err error + for { + // Skip past comments and empty lines seeking a real header. 
+ line, err = r.ReadString('\n') + if err != nil { + return nil, err + } + if !isSpaceOrComment(line) { + break + } + } + + if m := threadzStartRE.FindStringSubmatch(line); m != nil { + // Advance over initial comments until first stack trace. + for { + line, err = r.ReadString('\n') + if err != nil { + if err != io.EOF { + return nil, err + } + + if line == "" { + break + } + } + if sectionTrigger(line) != unrecognizedSection || line[0] == '-' { + break + } + } + } else if t := threadStartRE.FindStringSubmatch(line); len(t) != 4 { + return nil, errUnrecognized + } + + p := &Profile{ + SampleType: []*ValueType{{Type: "thread", Unit: "count"}}, + PeriodType: &ValueType{Type: "thread", Unit: "count"}, + Period: 1, + } + + locs := make(map[uint64]*Location) + // Recognize each thread and populate profile samples. + for sectionTrigger(line) == unrecognizedSection { + if strings.HasPrefix(line, "---- no stack trace for") { + line = "" + break + } + if t := threadStartRE.FindStringSubmatch(line); len(t) != 4 { + return nil, errUnrecognized + } + + var addrs []uint64 + line, addrs, err = parseThreadSample(r) + if err != nil { + return nil, errUnrecognized + } + if len(addrs) == 0 { + // We got a --same as previous threads--. Bump counters. + if len(p.Sample) > 0 { + s := p.Sample[len(p.Sample)-1] + s.Value[0]++ + } + continue + } + + var sloc []*Location + for i, addr := range addrs { + // Addresses from stack traces point to the next instruction after + // each call. Adjust by -1 to land somewhere on the actual call + // (except for the leaf, which is not a call). + if i > 0 { + addr-- + } + loc := locs[addr] + if locs[addr] == nil { + loc = &Location{ + Address: addr, + } + p.Location = append(p.Location, loc) + locs[addr] = loc + } + sloc = append(sloc, loc) + } + + p.Sample = append(p.Sample, &Sample{ + Value: []int64{1}, + Location: sloc, + }) + } + + if err = parseAdditionalSections(line, r, p); err != nil { + return nil, err + } + + return p, nil +} + +// parseThreadSample parses a symbolized or unsymbolized stack trace. +// Returns the first line after the traceback, the sample (or nil if +// it hits a 'same-as-previous' marker) and an error. +func parseThreadSample(b *bytes.Buffer) (nextl string, addrs []uint64, err error) { + var l string + sameAsPrevious := false + for { + if l, err = b.ReadString('\n'); err != nil { + if err != io.EOF { + return "", nil, err + } + if l == "" { + break + } + } + if l = strings.TrimSpace(l); l == "" { + continue + } + + if strings.HasPrefix(l, "---") { + break + } + if strings.Contains(l, "same as previous thread") { + sameAsPrevious = true + continue + } + + addrs = append(addrs, parseHexAddresses(l)...) + } + + if sameAsPrevious { + return l, nil, nil + } + return l, addrs, nil +} + +// parseAdditionalSections parses any additional sections in the +// profile, ignoring any unrecognized sections. +func parseAdditionalSections(l string, b *bytes.Buffer, p *Profile) (err error) { + for { + if sectionTrigger(l) == memoryMapSection { + break + } + // Ignore any unrecognized sections. + if l, err := b.ReadString('\n'); err != nil { + if err != io.EOF { + return err + } + if l == "" { + break + } + } + } + return p.ParseMemoryMap(b) +} + +// ParseProcMaps parses a memory map in the format of /proc/self/maps. +// ParseMemoryMap should be called after setting on a profile to +// associate locations to the corresponding mapping based on their +// address. 
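+//
+// Both full /proc/self/maps entries and the briefer "start-limit: file
+// (@offset)" form are accepted, as exercised in TestParseMappingEntry, e.g.:
+//
+//	02e00000-02e8a000 r-xp 02a00000 00:00 15953927   /foo/bin
+//	02e00000-02e8a000: /foo/bin (@2a00000)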
+func ParseProcMaps(rd io.Reader) ([]*Mapping, error) { + var mapping []*Mapping + + b := bufio.NewReader(rd) + + var attrs []string + var r *strings.Replacer + const delimiter = "=" + for { + l, err := b.ReadString('\n') + if err != nil { + if err != io.EOF { + return nil, err + } + if l == "" { + break + } + } + if l = strings.TrimSpace(l); l == "" { + continue + } + + if r != nil { + l = r.Replace(l) + } + m, err := parseMappingEntry(l) + if err != nil { + if err == errUnrecognized { + // Recognize assignments of the form: attr=value, and replace + // $attr with value on subsequent mappings. + if attr := strings.SplitN(l, delimiter, 2); len(attr) == 2 { + attrs = append(attrs, "$"+strings.TrimSpace(attr[0]), strings.TrimSpace(attr[1])) + r = strings.NewReplacer(attrs...) + } + // Ignore any unrecognized entries + continue + } + return nil, err + } + if m == nil { + continue + } + mapping = append(mapping, m) + } + return mapping, nil +} + +// ParseMemoryMap parses a memory map in the format of +// /proc/self/maps, and overrides the mappings in the current profile. +// It renumbers the samples and locations in the profile correspondingly. +func (p *Profile) ParseMemoryMap(rd io.Reader) error { + mapping, err := ParseProcMaps(rd) + if err != nil { + return err + } + p.Mapping = append(p.Mapping, mapping...) + p.massageMappings() + p.remapLocationIDs() + p.remapFunctionIDs() + p.remapMappingIDs() + return nil +} + +func parseMappingEntry(l string) (*Mapping, error) { + mapping := &Mapping{} + var err error + if me := procMapsRE.FindStringSubmatch(l); len(me) == 9 { + if !strings.Contains(me[3], "x") { + // Skip non-executable entries. + return nil, nil + } + if mapping.Start, err = strconv.ParseUint(me[1], 16, 64); err != nil { + return nil, errUnrecognized + } + if mapping.Limit, err = strconv.ParseUint(me[2], 16, 64); err != nil { + return nil, errUnrecognized + } + if me[4] != "" { + if mapping.Offset, err = strconv.ParseUint(me[4], 16, 64); err != nil { + return nil, errUnrecognized + } + } + mapping.File = me[8] + return mapping, nil + } + + if me := briefMapsRE.FindStringSubmatch(l); len(me) == 6 { + if mapping.Start, err = strconv.ParseUint(me[1], 16, 64); err != nil { + return nil, errUnrecognized + } + if mapping.Limit, err = strconv.ParseUint(me[2], 16, 64); err != nil { + return nil, errUnrecognized + } + mapping.File = me[3] + if me[5] != "" { + if mapping.Offset, err = strconv.ParseUint(me[5], 16, 64); err != nil { + return nil, errUnrecognized + } + } + return mapping, nil + } + + return nil, errUnrecognized +} + +type sectionType int + +const ( + unrecognizedSection sectionType = iota + memoryMapSection +) + +var memoryMapTriggers = []string{ + "--- Memory map: ---", + "MAPPED_LIBRARIES:", +} + +func sectionTrigger(line string) sectionType { + for _, trigger := range memoryMapTriggers { + if strings.Contains(line, trigger) { + return memoryMapSection + } + } + return unrecognizedSection +} + +func (p *Profile) addLegacyFrameInfo() { + switch { + case isProfileType(p, heapzSampleTypes): + p.DropFrames, p.KeepFrames = allocRxStr, allocSkipRxStr + case isProfileType(p, contentionzSampleTypes): + p.DropFrames, p.KeepFrames = lockRxStr, "" + default: + p.DropFrames, p.KeepFrames = cpuProfilerRxStr, "" + } +} + +var heapzSampleTypes = [][]string{ + {"allocations", "size"}, // early Go pprof profiles + {"objects", "space"}, + {"inuse_objects", "inuse_space"}, + {"alloc_objects", "alloc_space"}, +} +var contentionzSampleTypes = [][]string{ + {"contentions", "delay"}, +} + +func 
isProfileType(p *Profile, types [][]string) bool { + st := p.SampleType +nextType: + for _, t := range types { + if len(st) != len(t) { + continue + } + + for i := range st { + if st[i].Type != t[i] { + continue nextType + } + } + return true + } + return false +} + +var allocRxStr = strings.Join([]string{ + // POSIX entry points. + `calloc`, + `cfree`, + `malloc`, + `free`, + `memalign`, + `do_memalign`, + `(__)?posix_memalign`, + `pvalloc`, + `valloc`, + `realloc`, + + // TC malloc. + `tcmalloc::.*`, + `tc_calloc`, + `tc_cfree`, + `tc_malloc`, + `tc_free`, + `tc_memalign`, + `tc_posix_memalign`, + `tc_pvalloc`, + `tc_valloc`, + `tc_realloc`, + `tc_new`, + `tc_delete`, + `tc_newarray`, + `tc_deletearray`, + `tc_new_nothrow`, + `tc_newarray_nothrow`, + + // Memory-allocation routines on OS X. + `malloc_zone_malloc`, + `malloc_zone_calloc`, + `malloc_zone_valloc`, + `malloc_zone_realloc`, + `malloc_zone_memalign`, + `malloc_zone_free`, + + // Go runtime + `runtime\..*`, + + // Other misc. memory allocation routines + `BaseArena::.*`, + `(::)?do_malloc_no_errno`, + `(::)?do_malloc_pages`, + `(::)?do_malloc`, + `DoSampledAllocation`, + `MallocedMemBlock::MallocedMemBlock`, + `_M_allocate`, + `__builtin_(vec_)?delete`, + `__builtin_(vec_)?new`, + `__gnu_cxx::new_allocator::allocate`, + `__libc_malloc`, + `__malloc_alloc_template::allocate`, + `allocate`, + `cpp_alloc`, + `operator new(\[\])?`, + `simple_alloc::allocate`, +}, `|`) + +var allocSkipRxStr = strings.Join([]string{ + // Preserve Go runtime frames that appear in the middle/bottom of + // the stack. + `runtime\.panic`, +}, `|`) + +var cpuProfilerRxStr = strings.Join([]string{ + `ProfileData::Add`, + `ProfileData::prof_handler`, + `CpuProfiler::prof_handler`, + `__pthread_sighandler`, + `__restore`, +}, `|`) + +var lockRxStr = strings.Join([]string{ + `RecordLockProfileData`, + `(base::)?RecordLockProfileData.*`, + `(base::)?SubmitMutexProfileData.*`, + `(base::)?SubmitSpinLockProfileData.*`, + `(Mutex::)?AwaitCommon.*`, + `(Mutex::)?Unlock.*`, + `(Mutex::)?UnlockSlow.*`, + `(Mutex::)?ReaderUnlock.*`, + `(MutexLock::)?~MutexLock.*`, + `(SpinLock::)?Unlock.*`, + `(SpinLock::)?SlowUnlock.*`, + `(SpinLockHolder::)?~SpinLockHolder.*`, +}, `|`) diff --git a/src/profile/legacy_profile_test.go b/src/profile/legacy_profile_test.go new file mode 100644 index 00000000..4bbf8117 --- /dev/null +++ b/src/profile/legacy_profile_test.go @@ -0,0 +1,120 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+package profile
+
+import (
+	"bytes"
+	"fmt"
+	"reflect"
+	"strconv"
+	"strings"
+	"testing"
+)
+
+func TestLegacyProfileType(t *testing.T) {
+	type testcase struct {
+		sampleTypes []string
+		typeSet     [][]string
+		want        bool
+		setName     string
+	}
+
+	heap := heapzSampleTypes
+	cont := contentionzSampleTypes
+	testcases := []testcase{
+		// True cases
+		{[]string{"allocations", "size"}, heap, true, "heapzSampleTypes"},
+		{[]string{"objects", "space"}, heap, true, "heapzSampleTypes"},
+		{[]string{"inuse_objects", "inuse_space"}, heap, true, "heapzSampleTypes"},
+		{[]string{"alloc_objects", "alloc_space"}, heap, true, "heapzSampleTypes"},
+		{[]string{"contentions", "delay"}, cont, true, "contentionzSampleTypes"},
+		// False cases
+		{[]string{"objects"}, heap, false, "heapzSampleTypes"},
+		{[]string{"objects", "unknown"}, heap, false, "heapzSampleTypes"},
+		{[]string{"contentions", "delay"}, heap, false, "heapzSampleTypes"},
+		{[]string{"samples", "cpu"}, heap, false, "heapzSampleTypes"},
+		{[]string{"samples", "cpu"}, cont, false, "contentionzSampleTypes"},
+	}
+
+	for _, tc := range testcases {
+		p := profileOfType(tc.sampleTypes)
+		if got := isProfileType(p, tc.typeSet); got != tc.want {
+			t.Error("isProfileType({"+strings.Join(tc.sampleTypes, ",")+"},", tc.setName, "), got", got, "want", tc.want)
+		}
+	}
+}
+
+func TestCpuParse(t *testing.T) {
+	// profileString is a legacy encoded profile, represented by words separated by ":"
+	// Each sample has the form value : N : stack1..stackN
+	// EOF is represented as "0:1:0"
+	profileString := "1:3:100:999:100:"  // sample with bogus 999 and duplicate leaf
+	profileString += "1:5:200:999:200:501:502:"  // sample with bogus 999 and duplicate leaf
+	profileString += "1:12:300:999:300:601:602:603:604:605:606:607:608:609:"  // sample with bogus 999 and duplicate leaf
+	profileString += "0:1:0000"  // EOF -- must use 4 bytes for the final zero
+
+	p, err := cpuProfile([]byte(profileString), 1, parseString)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	if err := checkTestSample(p, []uint64{100}); err != nil {
+		t.Error(err)
+	}
+	if err := checkTestSample(p, []uint64{200, 500, 501}); err != nil {
+		t.Error(err)
+	}
+	if err := checkTestSample(p, []uint64{300, 600, 601, 602, 603, 604, 605, 606, 607, 608}); err != nil {
+		t.Error(err)
+	}
+}
+
+func parseString(b []byte) (uint64, []byte) {
+	slices := bytes.SplitN(b, []byte(":"), 2)
+	var value, remainder []byte
+	if len(slices) > 0 {
+		value = slices[0]
+	}
+	if len(slices) > 1 {
+		remainder = slices[1]
+	}
+	v, _ := strconv.ParseUint(string(value), 10, 64)
+	return v, remainder
+}
+
+func checkTestSample(p *Profile, want []uint64) error {
+	for _, s := range p.Sample {
+		got := []uint64{}
+		for _, l := range s.Location {
+			got = append(got, l.Address)
+		}
+		if reflect.DeepEqual(got, want) {
+			return nil
+		}
+	}
+	return fmt.Errorf("could not find sample: %v", want)
+}
+
+// profileOfType creates an empty profile with only sample types set,
+// for testing purposes only.
+func profileOfType(sampleTypes []string) *Profile {
+	p := new(Profile)
+	p.SampleType = make([]*ValueType, len(sampleTypes))
+	for i, t := range sampleTypes {
+		p.SampleType[i] = new(ValueType)
+		p.SampleType[i].Type = t
+	}
+	return p
+}
diff --git a/src/profile/merge.go b/src/profile/merge.go
new file mode 100644
index 00000000..92df2e9b
--- /dev/null
+++ b/src/profile/merge.go
@@ -0,0 +1,386 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package profile + +import ( + "fmt" + "reflect" + "sort" + "strconv" + "strings" +) + +// Merge merges all the profiles in profs into a single Profile. +// Returns a new profile independent of the input profiles. The merged +// profile is compacted to eliminate unused samples, locations, +// functions and mappings. Profiles must have identical profile sample +// and period types or the merge will fail. profile.Period of the +// resulting profile will be the maximum of all profiles, and +// profile.TimeNanos will be the earliest nonzero one. +func Merge(srcs []*Profile) (*Profile, error) { + if len(srcs) == 0 { + return nil, fmt.Errorf("no profiles to merge") + } + p, err := combineHeaders(srcs) + if err != nil { + return nil, err + } + + pm := &profileMerger{ + p: p, + samples: make(map[string]*Sample, len(srcs[0].Sample)), + locations: make(map[string]*Location, len(srcs[0].Location)), + functions: make(map[string]*Function, len(srcs[0].Function)), + mappings: make(map[string]*Mapping, len(srcs[0].Mapping)), + } + + for _, src := range srcs { + // Clear the profile-specific hash tables + pm.locationsByID = make(map[uint64]*Location, len(src.Location)) + pm.functionsByID = make(map[uint64]*Function, len(src.Function)) + pm.mappingsByID = make(map[uint64]mapInfo, len(src.Mapping)) + + if len(pm.mappings) == 0 && len(src.Mapping) > 0 { + // The Mapping list has the property that the first mapping + // represents the main binary. Take the first Mapping we see, + // otherwise the operations below will add mappings in an + // arbitrary order. + pm.mapMapping(srcs[0].Mapping[0]) + } + + for _, s := range src.Sample { + if !isZeroSample(s) { + pm.mapSample(s) + } + } + } + + for _, s := range p.Sample { + if isZeroSample(s) { + // If there are any zero samples, re-merge the profile to GC + // them. + return Merge([]*Profile{p}) + } + } + + return p, nil +} + +func isZeroSample(s *Sample) bool { + for _, v := range s.Value { + if v != 0 { + return false + } + } + return true +} + +type profileMerger struct { + p *Profile + + // Memoization tables within a profile. + locationsByID map[uint64]*Location + functionsByID map[uint64]*Function + mappingsByID map[uint64]mapInfo + + // Memoization tables for profile entities. 
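+	// Each table is keyed by the entity's key() string so that equivalent
+	// samples, locations, functions and mappings coming from different
+	// source profiles collapse into a single entry in the merged output.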
+ samples map[string]*Sample + locations map[string]*Location + functions map[string]*Function + mappings map[string]*Mapping +} + +type mapInfo struct { + m *Mapping + offset int64 +} + +func (pm *profileMerger) mapSample(src *Sample) *Sample { + s := &Sample{ + Location: make([]*Location, len(src.Location)), + Value: make([]int64, len(src.Value)), + Label: make(map[string][]string, len(src.Label)), + NumLabel: make(map[string][]int64, len(src.NumLabel)), + } + for i, l := range src.Location { + s.Location[i] = pm.mapLocation(l) + } + for k, v := range src.Label { + vv := make([]string, len(v)) + copy(vv, v) + s.Label[k] = vv + } + for k, v := range src.NumLabel { + vv := make([]int64, len(v)) + copy(vv, v) + s.NumLabel[k] = vv + } + // Check memoization table. Must be done on the remapped location to + // account for the remapped mapping. Add current values to the + // existing sample. + k := s.key() + if ss, ok := pm.samples[k]; ok { + for i, v := range src.Value { + ss.Value[i] += v + } + return ss + } + copy(s.Value, src.Value) + pm.samples[k] = s + pm.p.Sample = append(pm.p.Sample, s) + return s +} + +// key generates encoded string of Sample to be used as a key for maps. +func (sample *Sample) key() (s string) { + ids := make([]string, len(sample.Location)) + for i, l := range sample.Location { + ids[i] = strconv.FormatUint(l.ID, 16) + } + s = strings.Join(ids, "|") + if len(sample.Label) != 0 { + labels := make([]string, 0, len(sample.Label)) + for k, v := range sample.Label { + labels = append(labels, fmt.Sprintf("%q%q", k, v)) + } + sort.Strings(labels) + s += strings.Join(labels, "") + } + if len(sample.NumLabel) > 0 { + labels := make([]string, 0, len(sample.NumLabel)) + for k, v := range sample.NumLabel { + labels = append(labels, fmt.Sprintf("%q%v", k, v)) + } + sort.Strings(labels) + s += strings.Join(labels, "") + } + return s +} + +func (pm *profileMerger) mapLocation(src *Location) *Location { + if src == nil { + return nil + } + + if l, ok := pm.locationsByID[src.ID]; ok { + pm.locationsByID[src.ID] = l + return l + } + + mi := pm.mapMapping(src.Mapping) + l := &Location{ + ID: uint64(len(pm.p.Location) + 1), + Mapping: mi.m, + Address: uint64(int64(src.Address) + mi.offset), + Line: make([]Line, len(src.Line)), + } + for i, ln := range src.Line { + l.Line[i] = pm.mapLine(ln) + } + // Check memoization table. Must be done on the remapped location to + // account for the remapped mapping ID. + k := l.key() + if ll, ok := pm.locations[k]; ok { + pm.locationsByID[src.ID] = ll + return ll + } + pm.locationsByID[src.ID] = l + pm.locations[k] = l + pm.p.Location = append(pm.p.Location, l) + return l +} + +// key generates encoded string of Location to be used as a key for maps. +func (l *Location) key() string { + addr := l.Address + var s string + if l.Mapping != nil { + // Normalizes address to handle address space randomization. + addr -= l.Mapping.Start + s = strconv.FormatUint(l.Mapping.ID, 16) + "|" + strconv.FormatUint(addr, 16) + } else { + s = "0|" + strconv.FormatUint(addr, 16) + } + for _, line := range l.Line { + s += strconv.FormatUint(line.Function.ID, 16) + "|" + strconv.FormatInt(line.Line, 16) + } + return s +} + +func (pm *profileMerger) mapMapping(src *Mapping) mapInfo { + if src == nil { + return mapInfo{} + } + + if mi, ok := pm.mappingsByID[src.ID]; ok { + return mi + } + + // Check memoization tables. 
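+	// A mapping has two candidate keys (see Mapping.key below): one derived
+	// from its build ID and one from its file path, both prefixed with the
+	// rounded size and offset, e.g. (illustrative) `3000|0B"abc"` and
+	// `3000|0F"/lib/lib.so"`. The build ID match is preferred when present.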
+ bk, pk := src.key() + if src.BuildID != "" { + if m, ok := pm.mappings[bk]; ok { + mi := mapInfo{m, int64(m.Start) - int64(src.Start)} + pm.mappingsByID[src.ID] = mi + return mi + } + } + if src.File != "" { + if m, ok := pm.mappings[pk]; ok { + mi := mapInfo{m, int64(m.Start) - int64(src.Start)} + pm.mappingsByID[src.ID] = mi + return mi + } + } + m := &Mapping{ + ID: uint64(len(pm.p.Mapping) + 1), + Start: src.Start, + Limit: src.Limit, + Offset: src.Offset, + File: src.File, + BuildID: src.BuildID, + HasFunctions: src.HasFunctions, + HasFilenames: src.HasFilenames, + HasLineNumbers: src.HasLineNumbers, + HasInlineFrames: src.HasInlineFrames, + } + pm.p.Mapping = append(pm.p.Mapping, m) + + // Update memoization tables. + if m.BuildID != "" { + pm.mappings[bk] = m + } + if m.File != "" { + pm.mappings[pk] = m + } + mi := mapInfo{m, 0} + pm.mappingsByID[src.ID] = mi + return mi +} + +// key generates encoded strings of Mapping to be used as a key for +// maps. The first key represents only the build id, while the second +// represents only the file path. +func (m *Mapping) key() (buildIDKey, pathKey string) { + // Normalize addresses to handle address space randomization. + // Round up to next 4K boundary to avoid minor discrepancies. + const mapsizeRounding = 0x1000 + + size := m.Limit - m.Start + size = size + mapsizeRounding - 1 + size = size - (size % mapsizeRounding) + key := strconv.FormatUint(size, 16) + "|" + strconv.FormatUint(m.Offset, 16) + buildIDKey = key + "B" + strconv.Quote(m.BuildID) + pathKey = key + "F" + strconv.Quote(m.File) + return +} + +func (pm *profileMerger) mapLine(src Line) Line { + ln := Line{ + Function: pm.mapFunction(src.Function), + Line: src.Line, + } + return ln +} + +func (pm *profileMerger) mapFunction(src *Function) *Function { + if src == nil { + return nil + } + if f, ok := pm.functionsByID[src.ID]; ok { + return f + } + k := src.key() + if f, ok := pm.functions[k]; ok { + pm.functionsByID[src.ID] = f + return f + } + f := &Function{ + ID: uint64(len(pm.p.Function) + 1), + Name: src.Name, + SystemName: src.SystemName, + Filename: src.Filename, + StartLine: src.StartLine, + } + pm.functions[k] = f + pm.functionsByID[src.ID] = f + pm.p.Function = append(pm.p.Function, f) + return f +} + +// key generates encoded string of Function to be used as a key for maps. +func (f *Function) key() string { + return fmt.Sprintf("%x%q%q%q", f.StartLine, f.Name, f.SystemName, f.Filename) +} + +// combineHeaders checks that all profiles can be merged and returns +// their combined profile. +func combineHeaders(srcs []*Profile) (*Profile, error) { + for _, s := range srcs[1:] { + if err := srcs[0].compatible(s); err != nil { + return nil, err + } + } + + var timeNanos, durationNanos, period int64 + var comments []string + for _, s := range srcs { + if timeNanos == 0 || s.TimeNanos < timeNanos { + timeNanos = s.TimeNanos + } + durationNanos += s.DurationNanos + if period == 0 || period < s.Period { + period = s.Period + } + comments = append(comments, s.Comments...) + } + + p := &Profile{ + SampleType: make([]*ValueType, len(srcs[0].SampleType)), + + DropFrames: srcs[0].DropFrames, + KeepFrames: srcs[0].KeepFrames, + + TimeNanos: timeNanos, + DurationNanos: durationNanos, + PeriodType: srcs[0].PeriodType, + Period: period, + + Comments: comments, + } + copy(p.SampleType, srcs[0].SampleType) + return p, nil +} + +// compatible determines if two profiles can be compared/merged. 
+// returns nil if the profiles are compatible; otherwise an error with +// details on the incompatibility. +func (p *Profile) compatible(pb *Profile) error { + if !reflect.DeepEqual(p.PeriodType, pb.PeriodType) { + return fmt.Errorf("incompatible period types %v and %v", p.PeriodType, pb.PeriodType) + } + + if len(p.SampleType) != len(pb.SampleType) { + return fmt.Errorf("incompatible sample types %v and %v", p.SampleType, pb.SampleType) + } + + for i := range p.SampleType { + if !reflect.DeepEqual(p.SampleType[i], pb.SampleType[i]) { + return fmt.Errorf("incompatible sample types %v and %v", p.SampleType, pb.SampleType) + } + } + + return nil +} diff --git a/src/profile/profile.go b/src/profile/profile.go new file mode 100644 index 00000000..e23f5417 --- /dev/null +++ b/src/profile/profile.go @@ -0,0 +1,563 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package profile provides a representation of profile.proto and +// methods to encode/decode profiles in this format. +package profile + +import ( + "bytes" + "compress/gzip" + "fmt" + "io" + "io/ioutil" + "regexp" + "strings" + "time" +) + +// Profile is an in-memory representation of profile.proto. +type Profile struct { + SampleType []*ValueType + Sample []*Sample + Mapping []*Mapping + Location []*Location + Function []*Function + Comments []string + + DropFrames string + KeepFrames string + + TimeNanos int64 + DurationNanos int64 + PeriodType *ValueType + Period int64 + + commentX []int64 + dropFramesX int64 + keepFramesX int64 + stringTable []string +} + +// ValueType corresponds to Profile.ValueType +type ValueType struct { + Type string // cpu, wall, inuse_space, etc + Unit string // seconds, nanoseconds, bytes, etc + + typeX int64 + unitX int64 +} + +// Sample corresponds to Profile.Sample +type Sample struct { + Location []*Location + Value []int64 + Label map[string][]string + NumLabel map[string][]int64 + + locationIDX []uint64 + labelX []label +} + +// label corresponds to Profile.Label +type label struct { + keyX int64 + // Exactly one of the two following values must be set + strX int64 + numX int64 // Integer value for this label +} + +// Mapping corresponds to Profile.Mapping +type Mapping struct { + ID uint64 + Start uint64 + Limit uint64 + Offset uint64 + File string + BuildID string + HasFunctions bool + HasFilenames bool + HasLineNumbers bool + HasInlineFrames bool + + fileX int64 + buildIDX int64 +} + +// Location corresponds to Profile.Location +type Location struct { + ID uint64 + Mapping *Mapping + Address uint64 + Line []Line + + mappingIDX uint64 +} + +// Line corresponds to Profile.Line +type Line struct { + Function *Function + Line int64 + + functionIDX uint64 +} + +// Function corresponds to Profile.Function +type Function struct { + ID uint64 + Name string + SystemName string + Filename string + StartLine int64 + + nameX int64 + systemNameX int64 + filenameX int64 +} + +// Parse parses a profile and checks for its validity. 
The input +// may be a gzip-compressed encoded protobuf or one of many legacy +// profile formats which may be unsupported in the future. +func Parse(r io.Reader) (*Profile, error) { + data, err := ioutil.ReadAll(r) + if err != nil { + return nil, err + } + return ParseData(data) +} + +// ParseData parses a profile from a buffer and checks for its +// validity. +func ParseData(data []byte) (*Profile, error) { + var p *Profile + var err error + if len(data) >= 2 && data[0] == 0x1f && data[1] == 0x8b { + gz, err := gzip.NewReader(bytes.NewBuffer(data)) + if err == nil { + data, err = ioutil.ReadAll(gz) + } + if err != nil { + return nil, fmt.Errorf("decompressing profile: %v", err) + } + } + if p, err = ParseUncompressed(data); err != nil { + if p, err = parseLegacy(data); err != nil { + return nil, fmt.Errorf("parsing profile: %v", err) + } + } + + if err := p.CheckValid(); err != nil { + return nil, fmt.Errorf("malformed profile: %v", err) + } + return p, nil +} + +var errUnrecognized = fmt.Errorf("unrecognized profile format") +var errMalformed = fmt.Errorf("malformed profile format") + +func parseLegacy(data []byte) (*Profile, error) { + parsers := []func([]byte) (*Profile, error){ + parseCPU, + parseHeap, + parseGoCount, // goroutine, threadcreate + parseThread, + parseContention, + parseJavaProfile, + } + + for _, parser := range parsers { + p, err := parser(data) + if err == nil { + p.addLegacyFrameInfo() + return p, nil + } + if err != errUnrecognized { + return nil, err + } + } + return nil, errUnrecognized +} + +// ParseUncompressed parses an uncompressed protobuf into a profile. +func ParseUncompressed(data []byte) (*Profile, error) { + p := &Profile{} + if err := unmarshal(data, p); err != nil { + return nil, err + } + + if err := p.postDecode(); err != nil { + return nil, err + } + + return p, nil +} + +var libRx = regexp.MustCompile(`([.]so$|[.]so[._][0-9]+)`) + +// massageMappings applies heuristic-based changes to the profile +// mappings to account for quirks of some environments. +func (p *Profile) massageMappings() { + // Merge adjacent regions with matching names, checking that the offsets match + if len(p.Mapping) > 1 { + mappings := []*Mapping{p.Mapping[0]} + for _, m := range p.Mapping[1:] { + lm := mappings[len(mappings)-1] + if offset := lm.Offset + (lm.Limit - lm.Start); lm.Limit == m.Start && + offset == m.Offset && + (lm.File == m.File || lm.File == "") { + lm.File = m.File + lm.Limit = m.Limit + if lm.BuildID == "" { + lm.BuildID = m.BuildID + } + p.updateLocationMapping(m, lm) + continue + } + mappings = append(mappings, m) + } + p.Mapping = mappings + } + + // Use heuristics to identify main binary and move it to the top of the list of mappings + for i, m := range p.Mapping { + file := strings.TrimSpace(strings.Replace(m.File, "(deleted)", "", -1)) + if len(file) == 0 { + continue + } + if len(libRx.FindStringSubmatch(file)) > 0 { + continue + } + if strings.HasPrefix(file, "[") { + continue + } + // Swap what we guess is main to position 0. + p.Mapping[0], p.Mapping[i] = p.Mapping[i], p.Mapping[0] + break + } + + // Keep the mapping IDs neatly sorted + for i, m := range p.Mapping { + m.ID = uint64(i + 1) + } +} + +func (p *Profile) updateLocationMapping(from, to *Mapping) { + for _, l := range p.Location { + if l.Mapping == from { + l.Mapping = to + } + } +} + +// Write writes the profile as a gzip-compressed marshaled protobuf. 
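+//
+// A minimal round trip might look like the sketch below (error handling
+// elided; the names in and out are placeholders for an io.Reader with
+// profile data and an io.Writer for the result):
+//
+//	p, err := Parse(in)
+//	// ... inspect or transform p ...
+//	err = p.Write(out)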
+func (p *Profile) Write(w io.Writer) error { + p.preEncode() + b := marshal(p) + zw := gzip.NewWriter(w) + defer zw.Close() + _, err := zw.Write(b) + return err +} + +// WriteUncompressed writes the profile as a marshaled protobuf. +func (p *Profile) WriteUncompressed(w io.Writer) error { + p.preEncode() + b := marshal(p) + _, err := w.Write(b) + return err +} + +// CheckValid tests whether the profile is valid. Checks include, but are +// not limited to: +// - len(Profile.Sample[n].value) == len(Profile.value_unit) +// - Sample.id has a corresponding Profile.Location +func (p *Profile) CheckValid() error { + // Check that sample values are consistent + sampleLen := len(p.SampleType) + if sampleLen == 0 && len(p.Sample) != 0 { + return fmt.Errorf("missing sample type information") + } + for _, s := range p.Sample { + if len(s.Value) != sampleLen { + return fmt.Errorf("mismatch: sample has: %d values vs. %d types", len(s.Value), len(p.SampleType)) + } + } + + // Check that all mappings/locations/functions are in the tables + // Check that there are no duplicate ids + mappings := make(map[uint64]*Mapping, len(p.Mapping)) + for _, m := range p.Mapping { + if m.ID == 0 { + return fmt.Errorf("found mapping with reserved ID=0") + } + if mappings[m.ID] != nil { + return fmt.Errorf("multiple mappings with same id: %d", m.ID) + } + mappings[m.ID] = m + } + functions := make(map[uint64]*Function, len(p.Function)) + for _, f := range p.Function { + if f.ID == 0 { + return fmt.Errorf("found function with reserved ID=0") + } + if functions[f.ID] != nil { + return fmt.Errorf("multiple functions with same id: %d", f.ID) + } + functions[f.ID] = f + } + locations := make(map[uint64]*Location, len(p.Location)) + for _, l := range p.Location { + if l.ID == 0 { + return fmt.Errorf("found location with reserved id=0") + } + if locations[l.ID] != nil { + return fmt.Errorf("multiple locations with same id: %d", l.ID) + } + locations[l.ID] = l + if m := l.Mapping; m != nil { + if m.ID == 0 || mappings[m.ID] != m { + return fmt.Errorf("inconsistent mapping %p: %d", m, m.ID) + } + } + for _, ln := range l.Line { + if f := ln.Function; f != nil { + if f.ID == 0 || functions[f.ID] != f { + return fmt.Errorf("inconsistent function %p: %d", f, f.ID) + } + } + } + } + return nil +} + +// Aggregate merges the locations in the profile into equivalence +// classes preserving the request attributes. It also updates the +// samples to point to the merged locations. +func (p *Profile) Aggregate(inlineFrame, function, filename, linenumber, address bool) error { + for _, m := range p.Mapping { + m.HasInlineFrames = m.HasInlineFrames && inlineFrame + m.HasFunctions = m.HasFunctions && function + m.HasFilenames = m.HasFilenames && filename + m.HasLineNumbers = m.HasLineNumbers && linenumber + } + + // Aggregate functions + if !function || !filename { + for _, f := range p.Function { + if !function { + f.Name = "" + f.SystemName = "" + } + if !filename { + f.Filename = "" + } + } + } + + // Aggregate locations + if !inlineFrame || !address || !linenumber { + for _, l := range p.Location { + if !inlineFrame && len(l.Line) > 1 { + l.Line = l.Line[len(l.Line)-1:] + } + if !linenumber { + for i := range l.Line { + l.Line[i].Line = 0 + } + } + if !address { + l.Address = 0 + } + } + } + + return p.CheckValid() +} + +// Print dumps a text representation of a profile. Intended mainly +// for debugging purposes. 
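+// (TestParse in profile_test.go compares this output against the
+// testdata/*.string golden files.)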
+func (p *Profile) String() string { + + ss := make([]string, 0, len(p.Sample)+len(p.Mapping)+len(p.Location)) + if pt := p.PeriodType; pt != nil { + ss = append(ss, fmt.Sprintf("PeriodType: %s %s", pt.Type, pt.Unit)) + } + ss = append(ss, fmt.Sprintf("Period: %d", p.Period)) + if p.TimeNanos != 0 { + ss = append(ss, fmt.Sprintf("Time: %v", time.Unix(0, p.TimeNanos))) + } + if p.DurationNanos != 0 { + ss = append(ss, fmt.Sprintf("Duration: %.4v", time.Duration(p.DurationNanos))) + } + + ss = append(ss, "Samples:") + var sh1 string + for _, s := range p.SampleType { + sh1 = sh1 + fmt.Sprintf("%s/%s ", s.Type, s.Unit) + } + ss = append(ss, strings.TrimSpace(sh1)) + for _, s := range p.Sample { + var sv string + for _, v := range s.Value { + sv = fmt.Sprintf("%s %10d", sv, v) + } + sv = sv + ": " + for _, l := range s.Location { + sv = sv + fmt.Sprintf("%d ", l.ID) + } + ss = append(ss, sv) + const labelHeader = " " + if len(s.Label) > 0 { + ls := labelHeader + for k, v := range s.Label { + ls = ls + fmt.Sprintf("%s:%v ", k, v) + } + ss = append(ss, ls) + } + if len(s.NumLabel) > 0 { + ls := labelHeader + for k, v := range s.NumLabel { + ls = ls + fmt.Sprintf("%s:%v ", k, v) + } + ss = append(ss, ls) + } + } + + ss = append(ss, "Locations") + for _, l := range p.Location { + locStr := fmt.Sprintf("%6d: %#x ", l.ID, l.Address) + if m := l.Mapping; m != nil { + locStr = locStr + fmt.Sprintf("M=%d ", m.ID) + } + if len(l.Line) == 0 { + ss = append(ss, locStr) + } + for li := range l.Line { + lnStr := "??" + if fn := l.Line[li].Function; fn != nil { + lnStr = fmt.Sprintf("%s %s:%d s=%d", + fn.Name, + fn.Filename, + l.Line[li].Line, + fn.StartLine) + if fn.Name != fn.SystemName { + lnStr = lnStr + "(" + fn.SystemName + ")" + } + } + ss = append(ss, locStr+lnStr) + // Do not print location details past the first line + locStr = " " + } + } + + ss = append(ss, "Mappings") + for _, m := range p.Mapping { + bits := "" + if m.HasFunctions { + bits = bits + "[FN]" + } + if m.HasFilenames { + bits = bits + "[FL]" + } + if m.HasLineNumbers { + bits = bits + "[LN]" + } + if m.HasInlineFrames { + bits = bits + "[IN]" + } + ss = append(ss, fmt.Sprintf("%d: %#x/%#x/%#x %s %s %s", + m.ID, + m.Start, m.Limit, m.Offset, + m.File, + m.BuildID, + bits)) + } + + return strings.Join(ss, "\n") + "\n" +} + +// Scale multiplies all sample values in a profile by a constant. +func (p *Profile) Scale(ratio float64) { + if ratio == 1 { + return + } + ratios := make([]float64, len(p.SampleType)) + for i := range p.SampleType { + ratios[i] = ratio + } + p.ScaleN(ratios) +} + +// ScaleN multiplies each sample values in a sample by a different amount. +func (p *Profile) ScaleN(ratios []float64) error { + if len(p.SampleType) != len(ratios) { + return fmt.Errorf("mismatched scale ratios, got %d, want %d", len(ratios), len(p.SampleType)) + } + allOnes := true + for _, r := range ratios { + if r != 1 { + allOnes = false + break + } + } + if allOnes { + return nil + } + for _, s := range p.Sample { + for i, v := range s.Value { + if ratios[i] != 1 { + s.Value[i] = int64(float64(v) * ratios[i]) + } + } + } + return nil +} + +// HasFunctions determines if all locations in this profile have +// symbolized function information. +func (p *Profile) HasFunctions() bool { + for _, l := range p.Location { + if l.Mapping != nil && !l.Mapping.HasFunctions { + return false + } + } + return true +} + +// HasFileLines determines if all locations in this profile have +// symbolized file and line number information. 
+func (p *Profile) HasFileLines() bool { + for _, l := range p.Location { + if l.Mapping != nil && (!l.Mapping.HasFilenames || !l.Mapping.HasLineNumbers) { + return false + } + } + return true +} + +// Copy makes a fully independent copy of a profile. +func (p *Profile) Copy() *Profile { + p.preEncode() + b := marshal(p) + + pp := &Profile{} + if err := unmarshal(b, pp); err != nil { + panic(err) + } + if err := pp.postDecode(); err != nil { + panic(err) + } + + return pp +} diff --git a/src/profile/profile_test.go b/src/profile/profile_test.go new file mode 100644 index 00000000..3b3b1879 --- /dev/null +++ b/src/profile/profile_test.go @@ -0,0 +1,752 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package profile + +import ( + "bytes" + "fmt" + "io/ioutil" + "path/filepath" + "reflect" + "regexp" + "testing" + + "internal/proftest" +) + +func TestParse(t *testing.T) { + const path = "testdata/" + + for _, source := range []string{ + "go.crc32.cpu", + "go.godoc.thread", + "gobench.cpu", + "gobench.heap", + "cppbench.cpu", + "cppbench.heap", + "cppbench.contention", + "cppbench.growth", + "cppbench.thread", + "cppbench.thread.all", + "cppbench.thread.none", + "java.cpu", + "java.heap", + "java.contention", + } { + inbytes, err := ioutil.ReadFile(filepath.Join(path, source)) + if err != nil { + t.Fatal(err) + } + p, err := Parse(bytes.NewBuffer(inbytes)) + if err != nil { + t.Fatalf("%s: %s", source, err) + } + + js := p.String() + goldFilename := path + source + ".string" + gold, err := ioutil.ReadFile(goldFilename) + if err != nil { + t.Fatalf("%s: %v", source, err) + } + + if js != string(gold) { + t.Errorf("diff %s %s", source, goldFilename) + d, err := proftest.Diff(gold, []byte(js)) + if err != nil { + t.Fatalf("%s: %v", source, err) + } + t.Error(source + "\n" + string(d) + "\n" + "new profile at:\n" + leaveTempfile([]byte(js))) + } + + // Reencode and decode. + bw := bytes.NewBuffer(nil) + if err := p.Write(bw); err != nil { + t.Fatalf("%s: %v", source, err) + } + if p, err = Parse(bw); err != nil { + t.Fatalf("%s: %v", source, err) + } + js2 := p.String() + if js2 != string(gold) { + d, err := proftest.Diff(gold, []byte(js2)) + if err != nil { + t.Fatalf("%s: %v", source, err) + } + t.Error(source + "\n" + string(d) + "\n" + "gold:\n" + goldFilename + + "\nnew profile at:\n" + leaveTempfile([]byte(js))) + } + } +} + +// leaveTempfile leaves |b| in a temporary file on disk and returns the +// temp filename. This is useful to recover a profile when the test +// fails. 
+func leaveTempfile(b []byte) string { + f1, err := ioutil.TempFile("", "profile_test") + if err != nil { + panic(err) + } + if _, err := f1.Write(b); err != nil { + panic(err) + } + return f1.Name() +} + +const mainBinary = "/bin/main" + +var cpuM = []*Mapping{ + { + ID: 1, + Start: 0x10000, + Limit: 0x40000, + File: mainBinary, + HasFunctions: true, + HasFilenames: true, + HasLineNumbers: true, + HasInlineFrames: true, + }, + { + ID: 2, + Start: 0x1000, + Limit: 0x4000, + File: "/lib/lib.so", + HasFunctions: true, + HasFilenames: true, + HasLineNumbers: true, + HasInlineFrames: true, + }, + { + ID: 3, + Start: 0x4000, + Limit: 0x5000, + File: "/lib/lib2_c.so.6", + HasFunctions: true, + HasFilenames: true, + HasLineNumbers: true, + HasInlineFrames: true, + }, + { + ID: 4, + Start: 0x5000, + Limit: 0x9000, + File: "/lib/lib.so_6 (deleted)", + HasFunctions: true, + HasFilenames: true, + HasLineNumbers: true, + HasInlineFrames: true, + }, +} + +var cpuF = []*Function{ + {ID: 1, Name: "main", SystemName: "main", Filename: "main.c"}, + {ID: 2, Name: "foo", SystemName: "foo", Filename: "foo.c"}, + {ID: 3, Name: "foo_caller", SystemName: "foo_caller", Filename: "foo.c"}, +} + +var cpuL = []*Location{ + { + ID: 1000, + Mapping: cpuM[1], + Address: 0x1000, + Line: []Line{ + {Function: cpuF[0], Line: 1}, + }, + }, + { + ID: 2000, + Mapping: cpuM[0], + Address: 0x2000, + Line: []Line{ + {Function: cpuF[1], Line: 2}, + {Function: cpuF[2], Line: 1}, + }, + }, + { + ID: 3000, + Mapping: cpuM[0], + Address: 0x3000, + Line: []Line{ + {Function: cpuF[1], Line: 2}, + {Function: cpuF[2], Line: 1}, + }, + }, + { + ID: 3001, + Mapping: cpuM[0], + Address: 0x3001, + Line: []Line{ + {Function: cpuF[2], Line: 2}, + }, + }, + { + ID: 3002, + Mapping: cpuM[0], + Address: 0x3002, + Line: []Line{ + {Function: cpuF[2], Line: 3}, + }, + }, +} + +var testProfile = &Profile{ + PeriodType: &ValueType{Type: "cpu", Unit: "milliseconds"}, + Period: 1, + DurationNanos: 10e9, + SampleType: []*ValueType{ + {Type: "samples", Unit: "count"}, + {Type: "cpu", Unit: "milliseconds"}, + }, + Sample: []*Sample{ + { + Location: []*Location{cpuL[0]}, + Value: []int64{1000, 1000}, + Label: map[string][]string{ + "key1": []string{"tag1"}, + "key2": []string{"tag1"}, + }, + }, + { + Location: []*Location{cpuL[1], cpuL[0]}, + Value: []int64{100, 100}, + Label: map[string][]string{ + "key1": []string{"tag2"}, + "key3": []string{"tag2"}, + }, + }, + { + Location: []*Location{cpuL[2], cpuL[0]}, + Value: []int64{10, 10}, + Label: map[string][]string{ + "key1": []string{"tag3"}, + "key2": []string{"tag2"}, + }, + }, + { + Location: []*Location{cpuL[3], cpuL[0]}, + Value: []int64{10000, 10000}, + Label: map[string][]string{ + "key1": []string{"tag4"}, + "key2": []string{"tag1"}, + }, + }, + { + Location: []*Location{cpuL[4], cpuL[0]}, + Value: []int64{1, 1}, + Label: map[string][]string{ + "key1": []string{"tag4"}, + "key2": []string{"tag1"}, + }, + }, + }, + Location: cpuL, + Function: cpuF, + Mapping: cpuM, +} + +var aggTests = map[string]aggTest{ + "precise": aggTest{true, true, true, true, 5}, + "fileline": aggTest{false, true, true, true, 4}, + "inline_function": aggTest{false, true, false, true, 3}, + "function": aggTest{false, true, false, false, 2}, +} + +type aggTest struct { + precise, function, fileline, inlineFrame bool + rows int +} + +const totalSamples = int64(11111) + +func TestAggregation(t *testing.T) { + prof := testProfile.Copy() + for _, resolution := range []string{"precise", "fileline", "inline_function", "function"} { + a 
:= aggTests[resolution] + if !a.precise { + if err := prof.Aggregate(a.inlineFrame, a.function, a.fileline, a.fileline, false); err != nil { + t.Error("aggregating to " + resolution + ":" + err.Error()) + } + } + if err := checkAggregation(prof, &a); err != nil { + t.Error("failed aggregation to " + resolution + ": " + err.Error()) + } + } +} + +// checkAggregation verifies that the profile remained consistent +// with its aggregation. +func checkAggregation(prof *Profile, a *aggTest) error { + // Check that the total number of samples for the rows was preserved. + total := int64(0) + + samples := make(map[string]bool) + for _, sample := range prof.Sample { + tb := locationHash(sample) + samples[tb] = true + total += sample.Value[0] + } + + if total != totalSamples { + return fmt.Errorf("sample total %d, want %d", total, totalSamples) + } + + // Check the number of unique sample locations + if a.rows != len(samples) { + return fmt.Errorf("number of samples %d, want %d", len(samples), a.rows) + } + + // Check that all mappings have the right detail flags. + for _, m := range prof.Mapping { + if m.HasFunctions != a.function { + return fmt.Errorf("unexpected mapping.HasFunctions %v, want %v", m.HasFunctions, a.function) + } + if m.HasFilenames != a.fileline { + return fmt.Errorf("unexpected mapping.HasFilenames %v, want %v", m.HasFilenames, a.fileline) + } + if m.HasLineNumbers != a.fileline { + return fmt.Errorf("unexpected mapping.HasLineNumbers %v, want %v", m.HasLineNumbers, a.fileline) + } + if m.HasInlineFrames != a.inlineFrame { + return fmt.Errorf("unexpected mapping.HasInlineFrames %v, want %v", m.HasInlineFrames, a.inlineFrame) + } + } + + // Check that aggregation has removed finer resolution data. + for _, l := range prof.Location { + if !a.inlineFrame && len(l.Line) > 1 { + return fmt.Errorf("found %d lines on location %d, want 1", len(l.Line), l.ID) + } + + for _, ln := range l.Line { + if !a.fileline && (ln.Function.Filename != "" || ln.Line != 0) { + return fmt.Errorf("found line %s:%d on location %d, want :0", + ln.Function.Filename, ln.Line, l.ID) + } + if !a.function && (ln.Function.Name != "") { + return fmt.Errorf(`found file %s location %d, want ""`, + ln.Function.Name, l.ID) + } + } + } + + return nil +} + +func TestParseMappingEntry(t *testing.T) { + for _, test := range []*struct { + entry string + want *Mapping + }{ + { + entry: "00400000-02e00000 r-xp 00000000 00:00 0", + want: &Mapping{ + Start: 0x400000, + Limit: 0x2e00000, + }, + }, + { + entry: "02e00000-02e8a000 r-xp 02a00000 00:00 15953927 /foo/bin", + want: &Mapping{ + Start: 0x2e00000, + Limit: 0x2e8a000, + Offset: 0x2a00000, + File: "/foo/bin", + }, + }, + { + entry: "02e00000-02e8a000 r-xp 000000 00:00 15953927 [vdso]", + want: &Mapping{ + Start: 0x2e00000, + Limit: 0x2e8a000, + File: "[vdso]", + }, + }, + { + entry: " 02e00000-02e8a000: /foo/bin (@2a00000)", + want: &Mapping{ + Start: 0x2e00000, + Limit: 0x2e8a000, + Offset: 0x2a00000, + File: "/foo/bin", + }, + }, + { + entry: " 02e00000-02e8a000: /foo/bin", + want: &Mapping{ + Start: 0x2e00000, + Limit: 0x2e8a000, + File: "/foo/bin", + }, + }, + { + entry: " 02e00000-02e8a000: [vdso]", + want: &Mapping{ + Start: 0x2e00000, + Limit: 0x2e8a000, + File: "[vdso]", + }, + }, + } { + got, err := parseMappingEntry(test.entry) + if err != nil { + t.Error(err) + } + if !reflect.DeepEqual(test.want, got) { + t.Errorf("%s want=%v got=%v", test.entry, test.want, got) + } + } +} + +// Test merge leaves the main binary in place. 
+func TestMergeMain(t *testing.T) { + prof := testProfile.Copy() + p1, err := Merge([]*Profile{prof}) + if err != nil { + t.Fatalf("merge error: %v", err) + } + if cpuM[0].File != p1.Mapping[0].File { + t.Errorf("want Mapping[0]=%s got %s", cpuM[0].File, p1.Mapping[0].File) + } +} + +func TestMerge(t *testing.T) { + // Aggregate a profile with itself and once again with a factor of + // -2. Should end up with an empty profile (all samples for a + // location should add up to 0). + + prof := testProfile.Copy() + p1, err := Merge([]*Profile{prof, prof}) + if err != nil { + t.Errorf("merge error: %v", err) + } + prof.Scale(-2) + prof, err = Merge([]*Profile{p1, prof}) + if err != nil { + t.Errorf("merge error: %v", err) + } + + // Use aggregation to merge locations at function granularity. + if err := prof.Aggregate(false, true, false, false, false); err != nil { + t.Errorf("aggregating after merge: %v", err) + } + + samples := make(map[string]int64) + for _, s := range prof.Sample { + tb := locationHash(s) + samples[tb] = samples[tb] + s.Value[0] + } + for s, v := range samples { + if v != 0 { + t.Errorf("nonzero value for sample %s: %d", s, v) + } + } +} + +func TestMergeAll(t *testing.T) { + // Aggregate 10 copies of the profile. + profs := make([]*Profile, 10) + for i := 0; i < 10; i++ { + profs[i] = testProfile.Copy() + } + prof, err := Merge(profs) + if err != nil { + t.Errorf("merge error: %v", err) + } + samples := make(map[string]int64) + for _, s := range prof.Sample { + tb := locationHash(s) + samples[tb] = samples[tb] + s.Value[0] + } + for _, s := range testProfile.Sample { + tb := locationHash(s) + if samples[tb] != s.Value[0]*10 { + t.Errorf("merge got wrong value at %s : %d instead of %d", tb, samples[tb], s.Value[0]*10) + } + } +} + +func TestFilter(t *testing.T) { + // Perform several forms of filtering on the test profile. + + type filterTestcase struct { + focus, ignore, hide, show *regexp.Regexp + fm, im, hm, hnm bool + } + + for tx, tc := range []filterTestcase{ + {nil, nil, nil, nil, true, false, false, false}, + {regexp.MustCompile("notfound"), nil, nil, nil, false, false, false, false}, + {nil, regexp.MustCompile("foo.c"), nil, nil, true, true, false, false}, + {nil, nil, regexp.MustCompile("lib.so"), nil, true, false, true, false}, + } { + prof := *testProfile.Copy() + gf, gi, gh, gnh := prof.FilterSamplesByName(tc.focus, tc.ignore, tc.hide, tc.show) + if gf != tc.fm { + t.Errorf("Filter #%d, got fm=%v, want %v", tx, gf, tc.fm) + } + if gi != tc.im { + t.Errorf("Filter #%d, got im=%v, want %v", tx, gi, tc.im) + } + if gh != tc.hm { + t.Errorf("Filter #%d, got hm=%v, want %v", tx, gh, tc.hm) + } + if gnh != tc.hnm { + t.Errorf("Filter #%d, got hnm=%v, want %v", tx, gnh, tc.hnm) + } + } +} + +// locationHash constructs a string to use as a hashkey for a sample, based on its locations +func locationHash(s *Sample) string { + var tb string + for _, l := range s.Location { + for _, ln := range l.Line { + tb = tb + fmt.Sprintf("%s:%d@%d ", ln.Function.Name, ln.Line, l.Address) + } + } + return tb +} + +func TestSetMain(t *testing.T) { + testProfile.massageMappings() + if testProfile.Mapping[0].File != mainBinary { + t.Errorf("got %s for main", testProfile.Mapping[0].File) + } +} + +// Benchmarks + +// benchmarkMerge measures the overhead of merging profiles read from files. +// They must be the same type of profiles. 
+func benchmarkMerge(b *testing.B, files []string) { + const path = "testdata/" + + p := make([]*Profile, len(files)) + + for i, source := range files { + inBytes, err := ioutil.ReadFile(filepath.Join(path, source)) + if err != nil { + b.Fatal(err) + } + if p[i], err = Parse(bytes.NewBuffer(inBytes)); err != nil { + b.Fatalf("%s: %s", source, err) + } + } + + var prof *Profile + b.ResetTimer() + for i := 0; i < b.N; i++ { + prof, _ = Merge(p) + } + b.StopTimer() + + before := 0 + for _, p := range p { + p.preEncode() + buff := marshal(p) + before += len(buff) + } + prof.preEncode() + buff := marshal(prof) + after := len(buff) + b.Logf("Profile size before merge = %v, After merge = %v", before, after) +} + +// BenchmarkMergeCppCPUMedium measures the overhead of merging two medium CPU +// profiles of a C++ program (muppet). +func BenchmarkMergeCppCPUMedium(b *testing.B) { + files := []string{ + "muppet.profilez.medium.1.pb.gz", + "muppet.profilez.medium.2.pb.gz", + } + + benchmarkMerge(b, files) +} + +// BenchmarkMergeCppHeapMedium measures the overhead of merging two medium Heap +// profiles of a C++ program (muppet). +func BenchmarkMergeCppHeapMedium(b *testing.B) { + files := []string{ + "muppet.heapz.medium.1.pb.gz", + "muppet.heapz.medium.2.pb.gz", + } + + benchmarkMerge(b, files) +} + +// BenchmarkMergeCppContentionMedium measures the overhead of merging two medium +// contention profiles of a C++ program (muppet). +func BenchmarkMergeCppContentionMedium(b *testing.B) { + files := []string{ + "muppet.contentionz.medium.1.pb.gz", + "muppet.contentionz.medium.2.pb.gz", + } + + benchmarkMerge(b, files) +} + +// BenchmarkMergeJavaCPUMedium measures the overhead of merging two medium CPU +// profiles of a Java program (caribou). +func BenchmarkMergeJavaCPUMedium(b *testing.B) { + files := []string{ + "caribou.profilez.medium.1.pb.gz", + "caribou.profilez.medium.2.pb.gz", + } + + benchmarkMerge(b, files) +} + +// BenchmarkMergeJavaHeapMedium measures the overhead of merging two medium Heap +// profiles of a Java program (caribou). +func BenchmarkMergeJavaHeapMedium(b *testing.B) { + files := []string{ + "caribou.heapz.medium.1.pb.gz", + "caribou.heapz.medium.2.pb.gz", + } + + benchmarkMerge(b, files) +} + +// BenchmarkMergeJavaContentionMedium measures the overhead of merging two medium +// contention profiles of a Java program (caribou). +func BenchmarkMergeJavaContentionMedium(b *testing.B) { + files := []string{ + "caribou.contentionz.medium.1.pb.gz", + "caribou.contentionz.medium.2.pb.gz", + } + + benchmarkMerge(b, files) +} + +// BenchmarkMergeCppCPULarge measures the overhead of merging two large CPU +// profiles of a C++ program (muppet). +func BenchmarkMergeCppCPULarge(b *testing.B) { + files := []string{ + "muppet.profilez.large.1.pb.gz", + "muppet.profilez.large.2.pb.gz", + } + + benchmarkMerge(b, files) +} + +// BenchmarkMergeCppHeapLarge measures the overhead of merging two large Heap +// profiles of a C++ program (muppet). +func BenchmarkMergeCppHeapLarge(b *testing.B) { + files := []string{ + "muppet.heapz.large.1.pb.gz", + "muppet.heapz.large.2.pb.gz", + } + + benchmarkMerge(b, files) +} + +// BenchmarkMergeCppContentionLarge measures the overhead of merging two large +// contention profiles of a C++ program (muppet). 
+func BenchmarkMergeCppContentionLarge(b *testing.B) { + files := []string{ + "muppet.contentionz.large.1.pb.gz", + "muppet.contentionz.large.2.pb.gz", + } + + benchmarkMerge(b, files) +} + +// BenchmarkMergeJavaCPULarge measures the overhead of merging two large CPU +// profiles of a Java program (caribou). +func BenchmarkMergeJavaCPULarge(b *testing.B) { + files := []string{ + "caribou.profilez.large.1.pb.gz", + "caribou.profilez.large.2.pb.gz", + } + + benchmarkMerge(b, files) +} + +// BenchmarkMergeJavaHeapLarge measures the overhead of merging two large Heap +// profiles of a Java program (caribou). +func BenchmarkMergeJavaHeapLarge(b *testing.B) { + files := []string{ + "caribou.heapz.large.1.pb.gz", + "caribou.heapz.large.2.pb.gz", + } + + benchmarkMerge(b, files) +} + +// BenchmarkMergeJavaContentionLarge measures the overhead of merging two large +// contention profiles of a Java program (caribou). +func BenchmarkMergeJavaContentionLarge(b *testing.B) { + files := []string{ + "caribou.contentionz.large.1.pb.gz", + "caribou.contentionz.large.2.pb.gz", + } + + benchmarkMerge(b, files) +} + +// BenchmarkMergeJavaCPUWorst measures the overhead of merging rollups worth 7 days +// for the worst case scenario. These rollups are generated by merging samples +// (10 seconds/min) from /profilez handler of caribou prod jobs. They are deduplicated +// so that Samples, Locations, Mappings, and Functions are unique. +func BenchmarkMergeJavaCPUWorst(b *testing.B) { + files := []string{ + "caribou.profilez.1min.1.pb.gz", + "caribou.profilez.1min.2.pb.gz", + "caribou.profilez.1min.3.pb.gz", + "caribou.profilez.1min.4.pb.gz", + "caribou.profilez.1min.5.pb.gz", + "caribou.profilez.1min.6.pb.gz", + "caribou.profilez.1min.7.pb.gz", + "caribou.profilez.1min.8.pb.gz", + "caribou.profilez.1min.9.pb.gz", + "caribou.profilez.1min.10.pb.gz", + "caribou.profilez.1min.11.pb.gz", + "caribou.profilez.1min.12.pb.gz", + "caribou.profilez.1min.13.pb.gz", + "caribou.profilez.1min.14.pb.gz", + "caribou.profilez.1min.15.pb.gz", + "caribou.profilez.1min.16.pb.gz", + "caribou.profilez.1min.17.pb.gz", + "caribou.profilez.1min.18.pb.gz", + "caribou.profilez.10mins.1.pb.gz", + "caribou.profilez.10mins.2.pb.gz", + "caribou.profilez.10mins.3.pb.gz", + "caribou.profilez.10mins.4.pb.gz", + "caribou.profilez.10mins.5.pb.gz", + "caribou.profilez.10mins.6.pb.gz", + "caribou.profilez.10mins.7.pb.gz", + "caribou.profilez.10mins.8.pb.gz", + "caribou.profilez.10mins.9.pb.gz", + "caribou.profilez.10mins.10.pb.gz", + "caribou.profilez.1hour.1.pb.gz", + "caribou.profilez.1hour.2.pb.gz", + "caribou.profilez.1hour.3.pb.gz", + "caribou.profilez.1hour.4.pb.gz", + "caribou.profilez.1hour.5.pb.gz", + "caribou.profilez.1hour.6.pb.gz", + "caribou.profilez.4hours.1.pb.gz", + "caribou.profilez.4hours.2.pb.gz", + "caribou.profilez.4hours.3.pb.gz", + "caribou.profilez.4hours.4.pb.gz", + "caribou.profilez.12hours.1.pb.gz", + "caribou.profilez.12hours.2.pb.gz", + "caribou.profilez.1day.1.pb.gz", + "caribou.profilez.1day.2.pb.gz", + "caribou.profilez.1day.3.pb.gz", + "caribou.profilez.1day.4.pb.gz", + "caribou.profilez.1day.5.pb.gz", + } + + benchmarkMerge(b, files) +} diff --git a/src/profile/proto.go b/src/profile/proto.go new file mode 100644 index 00000000..568504fe --- /dev/null +++ b/src/profile/proto.go @@ -0,0 +1,370 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// This file is a simple protocol buffer encoder and decoder. +// +// A protocol message must implement the message interface: +// decoder() []decoder +// encode(*buffer) +// +// The decode method returns a slice indexed by field number that gives the +// function to decode that field. +// The encode method encodes its receiver into the given buffer. +// +// The two methods are simple enough to be implemented by hand rather than +// by using a protocol compiler. +// +// See profile.go for examples of messages implementing this interface. +// +// There is no support for groups, message sets, or "has" bits. + +package profile + +import "errors" + +type buffer struct { + field int + typ int + u64 uint64 + data []byte + tmp [16]byte +} + +type decoder func(*buffer, message) error + +type message interface { + decoder() []decoder + encode(*buffer) +} + +func marshal(m message) []byte { + var b buffer + m.encode(&b) + return b.data +} + +func encodeVarint(b *buffer, x uint64) { + for x >= 128 { + b.data = append(b.data, byte(x)|0x80) + x >>= 7 + } + b.data = append(b.data, byte(x)) +} + +func encodeLength(b *buffer, tag int, len int) { + encodeVarint(b, uint64(tag)<<3|2) + encodeVarint(b, uint64(len)) +} + +func encodeUint64(b *buffer, tag int, x uint64) { + // append varint to b.data + encodeVarint(b, uint64(tag)<<3|0) + encodeVarint(b, x) +} + +func encodeUint64s(b *buffer, tag int, x []uint64) { + if len(x) > 2 { + // Use packed encoding + n1 := len(b.data) + for _, u := range x { + encodeVarint(b, u) + } + n2 := len(b.data) + encodeLength(b, tag, n2-n1) + n3 := len(b.data) + copy(b.tmp[:], b.data[n2:n3]) + copy(b.data[n1+(n3-n2):], b.data[n1:n2]) + copy(b.data[n1:], b.tmp[:n3-n2]) + return + } + for _, u := range x { + encodeUint64(b, tag, u) + } +} + +func encodeUint64Opt(b *buffer, tag int, x uint64) { + if x == 0 { + return + } + encodeUint64(b, tag, x) +} + +func encodeInt64(b *buffer, tag int, x int64) { + u := uint64(x) + encodeUint64(b, tag, u) +} + +func encodeInt64s(b *buffer, tag int, x []int64) { + if len(x) > 2 { + // Use packed encoding + n1 := len(b.data) + for _, u := range x { + encodeVarint(b, uint64(u)) + } + n2 := len(b.data) + encodeLength(b, tag, n2-n1) + n3 := len(b.data) + copy(b.tmp[:], b.data[n2:n3]) + copy(b.data[n1+(n3-n2):], b.data[n1:n2]) + copy(b.data[n1:], b.tmp[:n3-n2]) + return + } + for _, u := range x { + encodeInt64(b, tag, u) + } +} + +func encodeInt64Opt(b *buffer, tag int, x int64) { + if x == 0 { + return + } + encodeInt64(b, tag, x) +} + +func encodeString(b *buffer, tag int, x string) { + encodeLength(b, tag, len(x)) + b.data = append(b.data, x...) 
+} + +func encodeStrings(b *buffer, tag int, x []string) { + for _, s := range x { + encodeString(b, tag, s) + } +} + +func encodeStringOpt(b *buffer, tag int, x string) { + if x == "" { + return + } + encodeString(b, tag, x) +} + +func encodeBool(b *buffer, tag int, x bool) { + if x { + encodeUint64(b, tag, 1) + } else { + encodeUint64(b, tag, 0) + } +} + +func encodeBoolOpt(b *buffer, tag int, x bool) { + if x == false { + return + } + encodeBool(b, tag, x) +} + +func encodeMessage(b *buffer, tag int, m message) { + n1 := len(b.data) + m.encode(b) + n2 := len(b.data) + encodeLength(b, tag, n2-n1) + n3 := len(b.data) + copy(b.tmp[:], b.data[n2:n3]) + copy(b.data[n1+(n3-n2):], b.data[n1:n2]) + copy(b.data[n1:], b.tmp[:n3-n2]) +} + +func unmarshal(data []byte, m message) (err error) { + b := buffer{data: data, typ: 2} + return decodeMessage(&b, m) +} + +func le64(p []byte) uint64 { + return uint64(p[0]) | uint64(p[1])<<8 | uint64(p[2])<<16 | uint64(p[3])<<24 | uint64(p[4])<<32 | uint64(p[5])<<40 | uint64(p[6])<<48 | uint64(p[7])<<56 +} + +func le32(p []byte) uint32 { + return uint32(p[0]) | uint32(p[1])<<8 | uint32(p[2])<<16 | uint32(p[3])<<24 +} + +func decodeVarint(data []byte) (uint64, []byte, error) { + var i int + var u uint64 + for i = 0; ; i++ { + if i >= 10 || i >= len(data) { + return 0, nil, errors.New("bad varint") + } + u |= uint64(data[i]&0x7F) << uint(7*i) + if data[i]&0x80 == 0 { + return u, data[i+1:], nil + } + } +} + +func decodeField(b *buffer, data []byte) ([]byte, error) { + x, data, err := decodeVarint(data) + if err != nil { + return nil, err + } + b.field = int(x >> 3) + b.typ = int(x & 7) + b.data = nil + b.u64 = 0 + switch b.typ { + case 0: + b.u64, data, err = decodeVarint(data) + if err != nil { + return nil, err + } + case 1: + if len(data) < 8 { + return nil, errors.New("not enough data") + } + b.u64 = le64(data[:8]) + data = data[8:] + case 2: + var n uint64 + n, data, err = decodeVarint(data) + if err != nil { + return nil, err + } + if n > uint64(len(data)) { + return nil, errors.New("too much data") + } + b.data = data[:n] + data = data[n:] + case 5: + if len(data) < 4 { + return nil, errors.New("not enough data") + } + b.u64 = uint64(le32(data[:4])) + data = data[4:] + default: + return nil, errors.New("unknown type: " + string(b.typ)) + } + + return data, nil +} + +func checkType(b *buffer, typ int) error { + if b.typ != typ { + return errors.New("type mismatch") + } + return nil +} + +func decodeMessage(b *buffer, m message) error { + if err := checkType(b, 2); err != nil { + return err + } + dec := m.decoder() + data := b.data + for len(data) > 0 { + // pull varint field# + type + var err error + data, err = decodeField(b, data) + if err != nil { + return err + } + if b.field >= len(dec) || dec[b.field] == nil { + continue + } + if err := dec[b.field](b, m); err != nil { + return err + } + } + return nil +} + +func decodeInt64(b *buffer, x *int64) error { + if err := checkType(b, 0); err != nil { + return err + } + *x = int64(b.u64) + return nil +} + +func decodeInt64s(b *buffer, x *[]int64) error { + if b.typ == 2 { + // Packed encoding + data := b.data + for len(data) > 0 { + var u uint64 + var err error + + if u, data, err = decodeVarint(data); err != nil { + return err + } + *x = append(*x, int64(u)) + } + return nil + } + var i int64 + if err := decodeInt64(b, &i); err != nil { + return err + } + *x = append(*x, i) + return nil +} + +func decodeUint64(b *buffer, x *uint64) error { + if err := checkType(b, 0); err != nil { + return err + } + *x = 
b.u64 + return nil +} + +func decodeUint64s(b *buffer, x *[]uint64) error { + if b.typ == 2 { + data := b.data + // Packed encoding + for len(data) > 0 { + var u uint64 + var err error + + if u, data, err = decodeVarint(data); err != nil { + return err + } + *x = append(*x, u) + } + return nil + } + var u uint64 + if err := decodeUint64(b, &u); err != nil { + return err + } + *x = append(*x, u) + return nil +} + +func decodeString(b *buffer, x *string) error { + if err := checkType(b, 2); err != nil { + return err + } + *x = string(b.data) + return nil +} + +func decodeStrings(b *buffer, x *[]string) error { + var s string + if err := decodeString(b, &s); err != nil { + return err + } + *x = append(*x, s) + return nil +} + +func decodeBool(b *buffer, x *bool) error { + if err := checkType(b, 0); err != nil { + return err + } + if int64(b.u64) == 0 { + *x = false + } else { + *x = true + } + return nil +} diff --git a/src/profile/proto_test.go b/src/profile/proto_test.go new file mode 100644 index 00000000..022f5d14 --- /dev/null +++ b/src/profile/proto_test.go @@ -0,0 +1,146 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package profile + +import ( + "bytes" + "testing" + + "internal/proftest" +) + +var testM = []*Mapping{ + { + ID: 1, + Start: 1, + Limit: 10, + Offset: 0, + File: "file1", + BuildID: "buildid1", + HasFunctions: true, + HasFilenames: true, + HasLineNumbers: true, + HasInlineFrames: true, + }, + { + ID: 2, + Start: 10, + Limit: 30, + Offset: 9, + File: "file1", + BuildID: "buildid2", + HasFunctions: true, + HasFilenames: true, + HasLineNumbers: true, + HasInlineFrames: true, + }, +} + +var testF = []*Function{ + {ID: 1, Name: "func1", SystemName: "func1", Filename: "file1"}, + {ID: 2, Name: "func2", SystemName: "func2", Filename: "file1"}, + {ID: 3, Name: "func3", SystemName: "func3", Filename: "file2"}, +} + +var testL = []*Location{ + { + ID: 1, + Address: 1, + Mapping: testM[0], + Line: []Line{ + { + Function: testF[0], + Line: 2, + }, + { + Function: testF[1], + Line: 2222222, + }, + }, + }, + { + ID: 2, + Mapping: testM[1], + Address: 11, + Line: []Line{ + { + Function: testF[2], + Line: 2, + }, + }, + }, + { + ID: 3, + Mapping: testM[1], + Address: 12, + }, +} + +var all = &Profile{ + PeriodType: &ValueType{Type: "cpu", Unit: "milliseconds"}, + Period: 10, + DurationNanos: 10e9, + SampleType: []*ValueType{ + {Type: "cpu", Unit: "cycles"}, + {Type: "object", Unit: "count"}, + }, + Sample: []*Sample{ + { + Location: []*Location{testL[0], testL[1], testL[2], testL[1], testL[1]}, + Label: map[string][]string{ + "key1": []string{"value1"}, + "key2": []string{"value2"}, + }, + Value: []int64{10, 20}, + }, + { + Location: []*Location{testL[1], testL[2], testL[0], testL[1]}, + Value: []int64{30, 40}, + Label: map[string][]string{ + "key1": []string{"value1"}, + "key2": []string{"value2"}, + }, + NumLabel: map[string][]int64{ + "key1": []int64{1, 2}, + "key2": []int64{3, 4}, + }, + }, + }, + Function: testF, + 
Mapping: testM, + Location: testL, + Comments: []string{"Comment 1", "Comment 2"}, +} + +func TestMarshalUnmarshal(t *testing.T) { + // Write the profile, parse it, and ensure they're equal. + buf := bytes.NewBuffer(nil) + all.Write(buf) + all2, err := Parse(buf) + if err != nil { + t.Fatal(err) + } + + js1 := proftest.EncodeJSON(&all) + js2 := proftest.EncodeJSON(&all2) + if string(js1) != string(js2) { + t.Errorf("profiles differ") + d, err := proftest.Diff(js1, js2) + if err != nil { + t.Fatal(err) + } + t.Error("\n" + string(d)) + } +} diff --git a/src/profile/prune.go b/src/profile/prune.go new file mode 100644 index 00000000..4b46373c --- /dev/null +++ b/src/profile/prune.go @@ -0,0 +1,155 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Implements methods to remove frames from profiles. + +package profile + +import ( + "fmt" + "regexp" + "strings" +) + +// Prune removes all nodes beneath a node matching dropRx, and not +// matching keepRx. If the root node of a Sample matches, the sample +// will have an empty stack. +func (p *Profile) Prune(dropRx, keepRx *regexp.Regexp) { + prune := make(map[uint64]bool) + pruneBeneath := make(map[uint64]bool) + + for _, loc := range p.Location { + var i int + for i = len(loc.Line) - 1; i >= 0; i-- { + if fn := loc.Line[i].Function; fn != nil && fn.Name != "" { + // Account for leading '.' on the PPC ELF v1 ABI. + funcName := strings.TrimPrefix(fn.Name, ".") + // Account for unsimplified names -- trim starting from the first '('. + if index := strings.Index(funcName, "("); index > 0 { + funcName = funcName[:index] + } + if dropRx.MatchString(funcName) { + if keepRx == nil || !keepRx.MatchString(funcName) { + break + } + } + } + } + + if i >= 0 { + // Found matching entry to prune. + pruneBeneath[loc.ID] = true + + // Remove the matching location. + if i == len(loc.Line)-1 { + // Matched the top entry: prune the whole location. + prune[loc.ID] = true + } else { + loc.Line = loc.Line[i+1:] + } + } + } + + // Prune locs from each Sample + for _, sample := range p.Sample { + // Scan from the root to the leaves to find the prune location. + // Do not prune frames before the first user frame, to avoid + // pruning everything. + foundUser := false + for i := len(sample.Location) - 1; i >= 0; i-- { + id := sample.Location[i].ID + if !prune[id] && !pruneBeneath[id] { + foundUser = true + continue + } + if !foundUser { + continue + } + if prune[id] { + sample.Location = sample.Location[i+1:] + break + } + if pruneBeneath[id] { + sample.Location = sample.Location[i:] + break + } + } + } +} + +// RemoveUninteresting prunes and elides profiles using built-in +// tables of uninteresting function names. 
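The method that follows only compiles and applies the regexps stored on the profile itself; a minimal hedged sketch of the intended call pattern (the frame patterns and the helper name are hypothetical):

// dropSystemFrames illustrates driving RemoveUninteresting: both patterns are
// wrapped as ^(...)$, so each must match a whole (simplified) function name.
func dropSystemFrames(p *Profile) error {
	p.DropFrames = `std::.*|__libc_.*` // frames to drop, together with everything beneath them...
	p.KeepFrames = `std::sort`         // ...except frames that also match KeepFrames
	return p.RemoveUninteresting()     // compiles the patterns and calls Prune
}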
+func (p *Profile) RemoveUninteresting() error { + var keep, drop *regexp.Regexp + var err error + + if p.DropFrames != "" { + if drop, err = regexp.Compile("^(" + p.DropFrames + ")$"); err != nil { + return fmt.Errorf("failed to compile regexp %s: %v", p.DropFrames, err) + } + if p.KeepFrames != "" { + if keep, err = regexp.Compile("^(" + p.KeepFrames + ")$"); err != nil { + return fmt.Errorf("failed to compile regexp %s: %v", p.KeepFrames, err) + } + } + p.Prune(drop, keep) + } + return nil +} + +// PruneFrom removes all nodes beneath the lowest node matching dropRx, not including itself. +// +// Please see the example below to understand this method as well as +// the difference from Prune method. +// +// A sample contains Location of [A,B,C,B,D] where D is the top frame and there's no inline. +// +// PruneFrom(A) returns [A,B,C,B,D] because there's no node beneath A. +// Prune(A, nil) returns [B,C,B,D] by removing A itself. +// +// PruneFrom(B) returns [B,C,B,D] by removing all nodes beneath the first B when scanning from the bottom. +// Prune(B, nil) returns [D] because a matching node is found by scanning from the root. +func (p *Profile) PruneFrom(dropRx *regexp.Regexp) { + pruneBeneath := make(map[uint64]bool) + + for _, loc := range p.Location { + for i := 0; i < len(loc.Line); i++ { + if fn := loc.Line[i].Function; fn != nil && fn.Name != "" { + // Account for leading '.' on the PPC ELF v1 ABI. + funcName := strings.TrimPrefix(fn.Name, ".") + // Account for unsimplified names -- trim starting from the first '('. + if index := strings.Index(funcName, "("); index > 0 { + funcName = funcName[:index] + } + if dropRx.MatchString(funcName) { + // Found matching entry to prune. + pruneBeneath[loc.ID] = true + loc.Line = loc.Line[i:] + break + } + } + } + } + + // Prune locs from each Sample + for _, sample := range p.Sample { + // Scan from the bottom leaf to the root to find the prune location. + for i, loc := range sample.Location { + if pruneBeneath[loc.ID] { + sample.Location = sample.Location[i:] + break + } + } + } +} diff --git a/src/profile/prune_test.go b/src/profile/prune_test.go new file mode 100644 index 00000000..58fa25ee --- /dev/null +++ b/src/profile/prune_test.go @@ -0,0 +1,139 @@ +// Copyright 2014 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
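The PruneFrom documentation above can be made concrete with a small hedged sketch; the function names A..D, the helper, and the one-sample profile are hypothetical:

// pruneContrastSketch builds the [A,B,C,B,D] stack from the PruneFrom comment
// (lowest frame first, D on top) and applies each pruning method to a fresh copy.
func pruneContrastSketch() {
	newP := func() *Profile {
		fn := func(id uint64, name string) *Function {
			return &Function{ID: id, Name: name, SystemName: name}
		}
		fns := []*Function{fn(1, "A"), fn(2, "B"), fn(3, "C"), fn(4, "D")}
		loc := func(id uint64, f *Function) *Location {
			return &Location{ID: id, Line: []Line{{Function: f}}}
		}
		locs := []*Location{loc(1, fns[0]), loc(2, fns[1]), loc(3, fns[2]), loc(4, fns[3])}
		return &Profile{
			SampleType: []*ValueType{{Type: "samples", Unit: "count"}},
			Sample: []*Sample{{
				Location: []*Location{locs[0], locs[1], locs[2], locs[1], locs[3]}, // A,B,C,B,D
				Value:    []int64{1},
			}},
			Location: locs,
			Function: fns,
		}
	}

	rxB := regexp.MustCompile("^B$")

	p1 := newP()
	p1.PruneFrom(rxB) // stack becomes [B,C,B,D]: only A, beneath the lowest B, is removed

	p2 := newP()
	p2.Prune(rxB, nil) // stack becomes [D]: the B found scanning from the root, and everything beneath it, is removed
}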
+ +package profile + +import ( + "strings" + "testing" +) + +func TestPrune(t *testing.T) { + for _, test := range []struct { + in *Profile + want string + }{ + {in1, out1}, + } { + in := test.in.Copy() + in.RemoveUninteresting() + if err := in.CheckValid(); err != nil { + t.Error(err) + } + w := strings.Split(test.want, "\n") + for i, g := range strings.Split(in.String(), "\n") { + if i >= len(w) { + t.Fatalf("got trailing %s", g) + } + if strings.TrimSpace(g) != strings.TrimSpace(w[i]) { + t.Fatalf(`%d: got: "%s" want:"%s"`, i, g, w[i]) + } + } + } +} + +var funs = []*Function{ + {ID: 1, Name: "main", SystemName: "main", Filename: "main.c"}, + {ID: 2, Name: "fun1", SystemName: "fun1", Filename: "fun.c"}, + {ID: 3, Name: "fun2", SystemName: "fun2", Filename: "fun.c"}, + {ID: 4, Name: "fun3", SystemName: "fun3", Filename: "fun.c"}, + {ID: 5, Name: "fun4", SystemName: "fun4", Filename: "fun.c"}, + {ID: 6, Name: "fun5", SystemName: "fun5", Filename: "fun.c"}, +} + +var locs1 = []*Location{ + { + ID: 1, + Line: []Line{ + {Function: funs[0], Line: 1}, + }, + }, + { + ID: 2, + Line: []Line{ + {Function: funs[1], Line: 2}, + {Function: funs[2], Line: 1}, + }, + }, + { + ID: 3, + Line: []Line{ + {Function: funs[3], Line: 2}, + {Function: funs[1], Line: 1}, + }, + }, + { + ID: 4, + Line: []Line{ + {Function: funs[3], Line: 2}, + {Function: funs[1], Line: 2}, + {Function: funs[5], Line: 2}, + }, + }, +} + +var in1 = &Profile{ + PeriodType: &ValueType{Type: "cpu", Unit: "milliseconds"}, + Period: 1, + DurationNanos: 10e9, + SampleType: []*ValueType{ + {Type: "samples", Unit: "count"}, + {Type: "cpu", Unit: "milliseconds"}, + }, + Sample: []*Sample{ + { + Location: []*Location{locs1[0]}, + Value: []int64{1, 1}, + }, + { + Location: []*Location{locs1[1], locs1[0]}, + Value: []int64{1, 1}, + }, + { + Location: []*Location{locs1[2], locs1[0]}, + Value: []int64{1, 1}, + }, + { + Location: []*Location{locs1[3], locs1[0]}, + Value: []int64{1, 1}, + }, + { + Location: []*Location{locs1[3], locs1[2], locs1[1], locs1[0]}, + Value: []int64{1, 1}, + }, + }, + Location: locs1, + Function: funs, + DropFrames: "fu.*[12]|banana", + KeepFrames: ".*[n2][n2]", +} + +const out1 = `PeriodType: cpu milliseconds +Period: 1 +Duration: 10s +Samples: +samples/count cpu/milliseconds + 1 1: 1 + 1 1: 2 1 + 1 1: 1 + 1 1: 4 1 + 1 1: 2 1 +Locations + 1: 0x0 main main.c:1 s=0 + 2: 0x0 fun2 fun.c:1 s=0 + 3: 0x0 fun3 fun.c:2 s=0 + fun1 fun.c:1 s=0 + 4: 0x0 fun5 fun.c:2 s=0 +Mappings +` diff --git a/src/profile/testdata/cppbench.contention b/src/profile/testdata/cppbench.contention new file mode 100644 index 00000000..66a64c95 --- /dev/null +++ b/src/profile/testdata/cppbench.contention @@ -0,0 +1,24 @@ +--- contentionz 1 --- +cycles/second = 3201000000 +sampling period = 100 +ms since reset = 16502830 +discarded samples = 0 + 19490304 27 @ 0xbccc97 0xc61202 0x42ed5f 0x42edc1 0x42e15a 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 768 1 @ 0xbccc97 0xa42dc7 0xa456e4 0x7fcdc2ff214e + 5760 2 @ 0xbccc97 0xb82b73 0xb82bcb 0xb87eab 0xb8814c 0x4e969d 0x4faa17 0x4fc5f6 0x4fd028 0x4fd230 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 569088 1 @ 0xbccc97 0xb82b73 0xb82bcb 0xb87f08 0xb8814c 0x42ed5f 0x42edc1 0x42e15a 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 2432 1 @ 0xbccc97 0xb82b73 0xb82bcb 0xb87eab 0xb8814c 0x7aa74c 0x7ab844 0x7ab914 0x79e9e9 0x79e326 0x4d299e 0x4d4b7b 0x4b7be8 0x4b7ff1 0x4d2dae 0x79e80a + 2034816 3 @ 0xbccc97 0xb82f0f 0xb83003 0xb87d50 0xc635f0 
0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e +--- Memory map: --- + 00400000-00fcb000: cppbench_server_main + 7fcdc231e000-7fcdc2321000: /libnss_cache-2.15.so + 7fcdc2522000-7fcdc252e000: /libnss_files-2.15.so + 7fcdc272f000-7fcdc28dd000: /libc-2.15.so + 7fcdc2ae7000-7fcdc2be2000: /libm-2.15.so + 7fcdc2de3000-7fcdc2dea000: /librt-2.15.so + 7fcdc2feb000-7fcdc3003000: /libpthread-2.15.so + 7fcdc3208000-7fcdc320a000: /libdl-2.15.so + 7fcdc340c000-7fcdc3415000: /libcrypt-2.15.so + 7fcdc3645000-7fcdc3669000: /ld-2.15.so + 7fff86bff000-7fff86c00000: [vdso] + ffffffffff600000-ffffffffff601000: [vsyscall] diff --git a/src/profile/testdata/cppbench.contention.string b/src/profile/testdata/cppbench.contention.string new file mode 100644 index 00000000..1c3a3ede --- /dev/null +++ b/src/profile/testdata/cppbench.contention.string @@ -0,0 +1,65 @@ +PeriodType: contentions count +Period: 100 +Duration: 4h35 +Samples: +contentions/count delay/nanoseconds + 2700 608881724: 1 2 3 4 5 6 7 8 9 10 11 12 13 + 100 23992: 1 14 12 13 + 200 179943: 1 15 16 17 18 19 20 21 22 23 9 10 11 12 13 + 100 17778444: 1 15 16 24 18 3 4 5 6 7 8 9 10 11 12 13 + 100 75976: 1 15 16 17 18 25 26 27 28 29 30 31 32 33 34 9 + 300 63568134: 1 35 36 37 38 39 40 6 7 8 9 10 11 12 13 +Locations + 1: 0xbccc97 M=1 + 2: 0xc61201 M=1 + 3: 0x42ed5e M=1 + 4: 0x42edc0 M=1 + 5: 0x42e159 M=1 + 6: 0x5261ae M=1 + 7: 0x526ede M=1 + 8: 0x5280aa M=1 + 9: 0x79e809 M=1 + 10: 0x7a251a M=1 + 11: 0x7a296c M=1 + 12: 0xa456e3 M=1 + 13: 0x7fcdc2ff214d M=7 + 14: 0xa42dc6 M=1 + 15: 0xb82b72 M=1 + 16: 0xb82bca M=1 + 17: 0xb87eaa M=1 + 18: 0xb8814b M=1 + 19: 0x4e969c M=1 + 20: 0x4faa16 M=1 + 21: 0x4fc5f5 M=1 + 22: 0x4fd027 M=1 + 23: 0x4fd22f M=1 + 24: 0xb87f07 M=1 + 25: 0x7aa74b M=1 + 26: 0x7ab843 M=1 + 27: 0x7ab913 M=1 + 28: 0x79e9e8 M=1 + 29: 0x79e325 M=1 + 30: 0x4d299d M=1 + 31: 0x4d4b7a M=1 + 32: 0x4b7be7 M=1 + 33: 0x4b7ff0 M=1 + 34: 0x4d2dad M=1 + 35: 0xb82f0e M=1 + 36: 0xb83002 M=1 + 37: 0xb87d4f M=1 + 38: 0xc635ef M=1 + 39: 0x42ecc2 M=1 + 40: 0x42e14b M=1 +Mappings +1: 0x400000/0xfcb000/0x0 cppbench_server_main +2: 0x7fcdc231e000/0x7fcdc2321000/0x0 /libnss_cache-2.15.so +3: 0x7fcdc2522000/0x7fcdc252e000/0x0 /libnss_files-2.15.so +4: 0x7fcdc272f000/0x7fcdc28dd000/0x0 /libc-2.15.so +5: 0x7fcdc2ae7000/0x7fcdc2be2000/0x0 /libm-2.15.so +6: 0x7fcdc2de3000/0x7fcdc2dea000/0x0 /librt-2.15.so +7: 0x7fcdc2feb000/0x7fcdc3003000/0x0 /libpthread-2.15.so +8: 0x7fcdc3208000/0x7fcdc320a000/0x0 /libdl-2.15.so +9: 0x7fcdc340c000/0x7fcdc3415000/0x0 /libcrypt-2.15.so +10: 0x7fcdc3645000/0x7fcdc3669000/0x0 /ld-2.15.so +11: 0x7fff86bff000/0x7fff86c00000/0x0 [vdso] +12: 0xffffffffff600000/0xffffffffff601000/0x0 [vsyscall] diff --git a/src/profile/testdata/cppbench.cpu b/src/profile/testdata/cppbench.cpu new file mode 100644 index 00000000..607015ee Binary files /dev/null and b/src/profile/testdata/cppbench.cpu differ diff --git a/src/profile/testdata/cppbench.cpu.string b/src/profile/testdata/cppbench.cpu.string new file mode 100644 index 00000000..251f913c --- /dev/null +++ b/src/profile/testdata/cppbench.cpu.string @@ -0,0 +1,179 @@ +PeriodType: cpu nanoseconds +Period: 10000000 +Samples: +samples/count cpu/nanoseconds + 1 10000000: 1 2 3 4 5 6 7 8 9 10 + 1 10000000: 11 2 3 4 5 6 7 8 9 10 + 1 10000000: 1 2 3 4 5 6 7 8 9 10 + 1 10000000: 12 13 14 15 16 17 18 3 4 5 6 7 8 9 10 + 542 5420000000: 19 17 18 3 4 5 6 7 8 9 10 + 1 10000000: 20 17 18 3 4 5 6 7 8 9 10 + 10 100000000: 21 17 18 3 4 5 6 7 8 9 10 + 1 10000000: 22 17 18 3 4 5 6 7 
8 9 10 + 1 10000000: 23 24 25 2 3 4 5 6 7 8 9 10 + 3 30000000: 26 16 17 18 3 4 5 6 7 8 9 10 + 1 10000000: 27 16 17 18 3 4 5 6 7 8 9 10 + 2 20000000: 28 16 17 18 3 4 5 6 7 8 9 10 + 1 10000000: 29 16 17 18 3 4 5 6 7 8 9 10 + 1 10000000: 30 31 32 33 34 35 36 37 38 9 10 + 3 30000000: 39 40 41 24 25 2 3 4 5 6 7 8 9 10 + 2 20000000: 42 40 41 24 25 2 3 4 5 6 7 8 9 10 + 1 10000000: 43 40 41 24 25 2 3 4 5 6 7 8 9 10 + 2 20000000: 44 45 41 24 25 2 3 4 5 6 7 8 9 10 + 67 670000000: 46 2 3 4 5 6 7 8 9 10 + 20 200000000: 47 2 3 4 5 6 7 8 9 10 + 12 120000000: 48 2 3 4 5 6 7 8 9 10 + 5 50000000: 11 2 3 4 5 6 7 8 9 10 + 1 10000000: 49 10 + 1 10000000: 50 51 52 13 14 15 16 17 18 3 4 5 6 7 8 9 10 + 2 20000000: 53 51 52 13 14 15 16 17 18 3 4 5 6 7 8 9 10 + 1 10000000: 54 14 15 16 17 18 3 4 5 6 7 8 9 10 + 1 10000000: 55 56 57 58 4 5 6 7 8 9 10 + 1 10000000: 59 41 24 25 2 3 4 5 6 7 8 9 10 + 1 10000000: 60 41 24 25 2 3 4 5 6 7 8 9 10 + 1 10000000: 61 62 63 64 40 41 24 25 2 3 4 5 6 7 8 9 10 + 1 10000000: 65 66 67 68 69 70 71 72 73 74 75 37 38 9 10 + 1 10000000: 76 13 77 15 16 17 18 3 4 5 6 7 8 9 10 + 2 20000000: 78 15 16 17 18 3 4 5 6 7 8 9 10 + 1 10000000: 79 15 16 17 18 3 4 5 6 7 8 9 10 + 1 10000000: 80 13 77 15 16 17 18 3 4 5 6 7 8 9 10 + 1 10000000: 81 15 16 17 18 3 4 5 6 7 8 9 10 + 1 10000000: 82 13 14 15 16 17 18 3 4 5 6 7 8 9 10 + 1 10000000: 83 13 77 15 16 17 18 3 4 5 6 7 8 9 10 + 1 10000000: 83 13 14 15 16 17 18 3 4 5 6 7 8 9 10 + 1 10000000: 30 84 85 86 9 10 + 1 10000000: 87 88 40 41 24 25 2 3 4 5 6 7 8 9 10 + 1 10000000: 89 90 91 92 8 9 10 + 1 10000000: 30 93 8 9 10 + 1 10000000: 30 84 94 9 10 + 1 10000000: 95 3 4 5 6 7 8 9 10 + 1 10000000: 96 97 3 4 5 6 7 8 9 10 + 1 10000000: 98 25 2 3 4 5 6 7 8 9 10 + 1 10000000: 99 25 2 3 4 5 6 7 8 9 10 + 1 10000000: 100 101 102 41 24 25 2 3 4 5 6 7 8 9 10 + 2 20000000: 103 104 91 92 8 9 10 + 1 10000000: 105 104 91 92 8 9 10 + 1 10000000: 106 107 108 109 97 3 4 5 6 7 8 9 10 +Locations + 1: 0x42ef04 M=1 + 2: 0x42e14b M=1 + 3: 0x5261ae M=1 + 4: 0x526ede M=1 + 5: 0x5280aa M=1 + 6: 0x79e809 M=1 + 7: 0x7a251a M=1 + 8: 0x7a296c M=1 + 9: 0xa456e3 M=1 + 10: 0x7f5e541460fd M=7 + 11: 0x42ef17 M=1 + 12: 0xb867c0 M=1 + 13: 0xb82bca M=1 + 14: 0xb87eaa M=1 + 15: 0xb8814b M=1 + 16: 0x42ed5e M=1 + 17: 0x42edc0 M=1 + 18: 0x42e159 M=1 + 19: 0x42ed43 M=1 + 20: 0xc60ea0 M=1 + 21: 0x42ed40 M=1 + 22: 0xbf42fe M=1 + 23: 0xb87d6f M=1 + 24: 0xc635ef M=1 + 25: 0x42ecc2 M=1 + 26: 0xc60f0f M=1 + 27: 0xc610d7 M=1 + 28: 0xc61108 M=1 + 29: 0xb8816e M=1 + 30: 0xbc8f1c M=1 + 31: 0xbcae54 M=1 + 32: 0xbcb5f4 M=1 + 33: 0x40b687 M=1 + 34: 0x535244 M=1 + 35: 0x536bf4 M=1 + 36: 0x42eb0f M=1 + 37: 0x42de64 M=1 + 38: 0xa41281 M=1 + 39: 0xb82dea M=1 + 40: 0xb83002 M=1 + 41: 0xb87d4f M=1 + 42: 0xb82df1 M=1 + 43: 0xb82dd3 M=1 + 44: 0xb82c23 M=1 + 45: 0xb82fd1 M=1 + 46: 0x42ef13 M=1 + 47: 0x42ef0b M=1 + 48: 0x42ef0f M=1 + 49: 0x7f5e53999f13 M=4 + 50: 0xb8591b M=1 + 51: 0xb85e48 M=1 + 52: 0xb82ae3 M=1 + 53: 0xb85893 M=1 + 54: 0xb88cdc M=1 + 55: 0x698000 M=1 + 56: 0x653f4b M=1 + 57: 0x54dc65 M=1 + 58: 0x525120 M=1 + 59: 0xb88d84 M=1 + 60: 0xb88d98 M=1 + 61: 0xb86591 M=1 + 62: 0xb859de M=1 + 63: 0xb862de M=1 + 64: 0xb82d5e M=1 + 65: 0x967171 M=1 + 66: 0x964990 M=1 + 67: 0x448584 M=1 + 68: 0x5476d7 M=1 + 69: 0x4f1be0 M=1 + 70: 0x4f34db M=1 + 71: 0x4f8a9a M=1 + 72: 0x5388df M=1 + 73: 0x573c5a M=1 + 74: 0x4a4168 M=1 + 75: 0x42eb03 M=1 + 76: 0xb82a31 M=1 + 77: 0xb87f07 M=1 + 78: 0xb87e76 M=1 + 79: 0xb87e7e M=1 + 80: 0xb82a36 M=1 + 81: 0xb87ede M=1 + 82: 0xb82a55 M=1 + 83: 0xb82b08 M=1 + 84: 0xbcbcff M=1 + 85: 
0xbcbea4 M=1 + 86: 0xa40112 M=1 + 87: 0xb85e87 M=1 + 88: 0xb82d77 M=1 + 89: 0x79eb32 M=1 + 90: 0x7a18e8 M=1 + 91: 0x7a1c44 M=1 + 92: 0x7a2726 M=1 + 93: 0x7a2690 M=1 + 94: 0x89f186 M=1 + 95: 0xc60eb7 M=1 + 96: 0x521c7f M=1 + 97: 0x5194c8 M=1 + 98: 0xc634f0 M=1 + 99: 0xc63245 M=1 + 100: 0xb867d8 M=1 + 101: 0xb82cf2 M=1 + 102: 0xb82f82 M=1 + 103: 0x7f5e538b9a93 M=4 + 104: 0x7a1955 M=1 + 105: 0x7f5e538b9a97 M=4 + 106: 0x7e0f10 M=1 + 107: 0x7e0b5d M=1 + 108: 0x6ab44f M=1 + 109: 0x521d51 M=1 +Mappings +1: 0x400000/0xfcb000/0x0 cppbench_server_main +2: 0x7f5e53061000/0x7f5e53062000/0x0 /lib/libnss_borg-2.15.so +3: 0x7f5e53264000/0x7f5e53270000/0x0 /lib/libnss_files-2.15.so +4: 0x7f5e53883000/0x7f5e53a31000/0x0 /lib/libc-2.15.so +5: 0x7f5e53c3b000/0x7f5e53d36000/0x0 /lib/libm-2.15.so +6: 0x7f5e53f37000/0x7f5e53f3e000/0x0 /lib/librt-2.15.so +7: 0x7f5e5413f000/0x7f5e54157000/0x0 /lib/libpthread-2.15.so +8: 0x7f5e5435c000/0x7f5e5435e000/0x0 /lib/libdl-2.15.so +9: 0x7f5e54560000/0x7f5e54569000/0x0 /lib/libcrypt-2.15.so +10: 0x7f5e54799000/0x7f5e547bd000/0x0 /lib/ld-2.15.so +11: 0x7ffffb56b000/0x7ffffb56d000/0x0 [vdso] +12: 0xffffffffff600000/0xffffffffff601000/0x0 [vsyscall] diff --git a/src/profile/testdata/cppbench.growth b/src/profile/testdata/cppbench.growth new file mode 100644 index 00000000..d06f78b0 --- /dev/null +++ b/src/profile/testdata/cppbench.growth @@ -0,0 +1,99 @@ +heap profile: 85: 178257920 [ 85: 178257920] @ growthz + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 
0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xc635c8 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 
0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 
0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0xafc0eb 0xb087b1 0xb0aa7d 0xb0b374 0xb12f10 0xb13a92 0xb0c443 0xb145f3 0xb147ca 0xa5dddd 0xbbffe6 0xa5e837 0xa65f94 0x5aac9e 0x535526 0x535144 0x5aa468 0x7e3ce7 0x7d13a2 0x7e0d28 0x6ab450 0x538d27 0x5390e8 0x5391e3 0x4e9603 0x4faa17 0x4fc5f6 + 1: 2097152 [ 1: 2097152] @ 0xc635c8 0x816900 0x8149fd 0x813aa0 0xbbff77 0x81421c 0x4ed414 0x4fd707 0x4de2a2 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7fcdc2ff214e + 
1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0xbb5783 0x40acd8 0x61192e 0x4b9522 0x4b9f62 0x4ba025 0x40bd86 0x7fcdc276711d + 1: 2097152 [ 1: 2097152] @ 0xb83003 0xb87d50 0xc635f0 0x42d576 0xc25cc6 0x40651b +--- Memory map: --- + 00400000-00fcb000: cppbench_server_main + 7fcdc231e000-7fcdc2321000: /libnss_cache-2.15.so + 7fcdc2522000-7fcdc252e000: /libnss_files-2.15.so + 7fcdc272f000-7fcdc28dd000: /libc-2.15.so + 7fcdc2ae7000-7fcdc2be2000: /libm-2.15.so + 7fcdc2de3000-7fcdc2dea000: /librt-2.15.so + 7fcdc2feb000-7fcdc3003000: /libpthread-2.15.so + 7fcdc3208000-7fcdc320a000: /libdl-2.15.so + 7fcdc340c000-7fcdc3415000: /libcrypt-2.15.so + 7fcdc3645000-7fcdc3669000: /ld-2.15.so + 7fff86bff000-7fff86c00000: [vdso] + ffffffffff600000-ffffffffff601000: [vsyscall] diff --git a/src/profile/testdata/cppbench.growth.string b/src/profile/testdata/cppbench.growth.string new file mode 100644 index 00000000..ff5214d9 --- /dev/null +++ b/src/profile/testdata/cppbench.growth.string @@ -0,0 +1,248 @@ +PeriodType: space bytes +Period: 1 +Samples: +objects/count space/bytes + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 14 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 
12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 4 5 6 7 8 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 + bytes:[2097152] + 1 2097152: 14 42 43 44 45 46 47 48 49 9 10 11 12 13 + bytes:[2097152] + 1 2097152: 1 2 3 50 51 52 53 54 55 56 57 + bytes:[2097152] + 1 2097152: 1 2 3 58 59 60 + bytes:[2097152] +Locations + 1: 0xb83003 M=1 + 2: 0xb87d4f M=1 + 3: 0xc635ef M=1 + 4: 0x42ecc2 M=1 + 5: 0x42e14b M=1 + 6: 0x5261ae M=1 + 7: 0x526ede M=1 + 8: 0x5280aa M=1 + 9: 0x79e809 M=1 + 10: 0x7a251a M=1 + 11: 0x7a296c M=1 + 12: 0xa456e3 M=1 + 13: 0x7fcdc2ff214d M=7 + 14: 0xc635c8 M=1 + 15: 0xafc0ea M=1 + 16: 0xb087b0 M=1 + 17: 0xb0aa7c M=1 + 18: 0xb0b373 M=1 + 19: 0xb12f0f M=1 + 20: 0xb13a91 M=1 + 21: 0xb0c442 M=1 + 22: 0xb145f2 M=1 + 23: 0xb147c9 M=1 + 24: 0xa5dddc M=1 + 25: 0xbbffe5 M=1 + 26: 0xa5e836 M=1 + 27: 0xa65f93 M=1 + 28: 0x5aac9d M=1 + 29: 0x535525 M=1 + 30: 0x535143 M=1 + 31: 0x5aa467 M=1 + 32: 0x7e3ce6 M=1 + 33: 0x7d13a1 M=1 + 34: 0x7e0d27 M=1 + 35: 0x6ab44f M=1 + 36: 0x538d26 M=1 + 37: 0x5390e7 M=1 + 38: 0x5391e2 M=1 + 39: 0x4e9602 M=1 + 40: 0x4faa16 M=1 + 41: 0x4fc5f5 M=1 + 42: 
0x8168ff M=1 + 43: 0x8149fc M=1 + 44: 0x813a9f M=1 + 45: 0xbbff76 M=1 + 46: 0x81421b M=1 + 47: 0x4ed413 M=1 + 48: 0x4fd706 M=1 + 49: 0x4de2a1 M=1 + 50: 0xbb5782 M=1 + 51: 0x40acd7 M=1 + 52: 0x61192d M=1 + 53: 0x4b9521 M=1 + 54: 0x4b9f61 M=1 + 55: 0x4ba024 M=1 + 56: 0x40bd85 M=1 + 57: 0x7fcdc276711c M=4 + 58: 0x42d575 M=1 + 59: 0xc25cc5 M=1 + 60: 0x40651a M=1 +Mappings +1: 0x400000/0xfcb000/0x0 cppbench_server_main +2: 0x7fcdc231e000/0x7fcdc2321000/0x0 /libnss_cache-2.15.so +3: 0x7fcdc2522000/0x7fcdc252e000/0x0 /libnss_files-2.15.so +4: 0x7fcdc272f000/0x7fcdc28dd000/0x0 /libc-2.15.so +5: 0x7fcdc2ae7000/0x7fcdc2be2000/0x0 /libm-2.15.so +6: 0x7fcdc2de3000/0x7fcdc2dea000/0x0 /librt-2.15.so +7: 0x7fcdc2feb000/0x7fcdc3003000/0x0 /libpthread-2.15.so +8: 0x7fcdc3208000/0x7fcdc320a000/0x0 /libdl-2.15.so +9: 0x7fcdc340c000/0x7fcdc3415000/0x0 /libcrypt-2.15.so +10: 0x7fcdc3645000/0x7fcdc3669000/0x0 /ld-2.15.so +11: 0x7fff86bff000/0x7fff86c00000/0x0 [vdso] +12: 0xffffffffff600000/0xffffffffff601000/0x0 [vsyscall] diff --git a/src/profile/testdata/cppbench.heap b/src/profile/testdata/cppbench.heap new file mode 100644 index 00000000..56222507 --- /dev/null +++ b/src/profile/testdata/cppbench.heap @@ -0,0 +1,47 @@ +heap profile: 144: 8498176 [ 144: 8498176] @ heapz_v2/524288 + 1: 9216 [ 1: 9216] @ 0xc635c8 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe + 1: 144 [ 1: 144] @ 0xc635c8 0xa7479b 0xb65e6b 0xb65f80 0xa6d069 0xa6dc80 0xbbffe6 0xa5dd84 0xa7b7c6 0xaa88da 0xaa9db2 0xb59bae 0xb0c39c 0xb145f3 0xb147ca 0xa5dddd 0xbbffe6 0xa5e837 0xa65f94 0x5aac9e 0x535526 0x535144 0x5aa468 0x7e3ce7 0x7d13a2 0x7e0d28 0x6ab450 0x538d27 0x5390e8 0x5391e3 0x4e9603 + 7: 114688 [ 7: 114688] @ 0xc635c8 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe + 1: 1792 [ 1: 1792] @ 0xc635c8 0x51a272 0x524997 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe + 13: 319488 [ 13: 319488] @ 0xc635c8 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe + 1: 1792 [ 1: 1792] @ 0xc635c8 0xac95a0 0xacdc7c 0xace07b 0xace1ac 0xabd100 0xabe2a9 0x72f52e 0x655376 0x6558d3 0x41c711 0xc25cc6 0x40651b + 1: 2162688 [ 1: 2162688] @ 0xc63568 0xbc462e 0xbc4bb5 0xbc4eda 0x4a57b8 0x4b152c 0x4ae04c 0x4ad225 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe + 1: 48 [ 1: 48] @ 0xc635c8 0x7be14a 0x7be675 0x6b312d 0xbaa17f 0xbaa142 0xbaabc6 0xbb092c 0x40bce4 0x7f47a4bab11d + 1: 262144 [ 1: 262144] @ 0xc635c8 0x816900 0x8149fd 0x8139f4 0xbbff77 0x81421c 0x4ed414 0x4fd707 0x4de2a2 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe + 1: 320 [ 1: 320] @ 0xc635c8 0x721a59 0x43005e 0x7382a4 0x430590 0x435425 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe + 1: 1792 [ 1: 1792] @ 0xc635c8 0x5413b0 0x541ab2 0xbaa17f 0xbaabc6 0x53507c 0xbaa17f 0xbaa9f9 0xbb0d21 0x40bce4 0x7f47a4bab11d + 1: 10240 [ 1: 10240] @ 0xc635c8 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe + 16: 327680 [ 16: 327680] @ 0xc635c8 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe + 1: 160 [ 1: 160] @ 0xc635c8 0x578705 0x586247 0x592615 0x592745 0x592cb9 0xa456e4 0x7f47a54360fe + 1: 8192 [ 1: 8192] @ 0xc635c8 0xaaf469 0x52cad7 0x52e89b 0x527f32 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe + 2: 24576 [ 2: 24576] @ 0xc635c8 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe 
+ 1: 2097152 [ 1: 2097152] @ 0xc63568 0xbc463b 0xbc4bb5 0xbc4eda 0x4a57b8 0x4b152c 0x4ae04c 0x4ad225 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe + 1: 448 [ 1: 448] @ 0xc635c8 0xafca3b 0xb09ba0 0xb09ec0 0xb12fec 0xb13a92 0xb13c93 0xb13d9d 0xa02777 0xbbff77 0xa026ec 0x5701e2 0x53541a 0x535144 0x5aa468 0x7e3ce7 0x7d13a2 0x7e0d28 0x6ab450 0x538d27 0x5390e8 0x5391e3 0x4e9603 0x4faa17 0x4fc5f6 0x4fd028 0x4fd230 0x79e80a 0x7a251b 0x7a296d 0xa456e4 + 47: 1925120 [ 47: 1925120] @ 0xc635c8 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe + 1: 6656 [ 1: 6656] @ 0xc635c8 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe + 11: 292864 [ 11: 292864] @ 0xc635c8 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe + 1: 4096 [ 1: 4096] @ 0xc635c8 0x75373b 0x7eb2d3 0x7ecc87 0x7ece56 0x7ed1ce 0x7ed360 0x7edb1a 0x7edbb5 0x7d50b0 0x4b9ba6 0x4b9f62 0x4ba025 0x40bd86 0x7f47a4bab11d + 1: 112 [ 1: 112] @ 0xc635c8 0x430498 0x435425 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe + 1: 20480 [ 1: 20480] @ 0xc635c8 0x5a8b92 0x526bff 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe + 1: 48 [ 1: 48] @ 0xc635c8 0x720c2e 0x5d35f0 0xbaa17f 0xbaabc6 0x42f03d 0xbaa17f 0xbaa9f9 0xbb0d21 0x40bce4 0x7f47a4bab11d + 1: 8192 [ 1: 8192] @ 0xc635c8 0xaaf3e6 0xab0ba0 0xab11be 0xab1639 0x52ebdc 0x527f32 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe + 2: 131072 [ 2: 131072] @ 0xc635c8 0xaaf469 0xaad4ce 0xb66bcd 0xb670f2 0xb659b5 0x63689b 0x548172 0x520cdc 0x521b82 0x5194c9 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe + 1: 8192 [ 1: 8192] @ 0xc635c8 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe + 1: 512 [ 1: 512] @ 0xc635c8 0xaff12a 0xb0b331 0xb12f10 0xb13a92 0xb0c443 0xb145f3 0xb147ca 0xa5dddd 0xbbffe6 0xa5e837 0xa65f94 0x5aac9e 0x535526 0x535144 0x5aa468 0x7e3ce7 0x7d13a2 0x7e0d28 0x6ab450 0x538d27 0x5390e8 0x5391e3 0x4e9603 0x4faa17 0x4fc5f6 0x4fd028 0x4fd230 0x79e80a 0x7a251b 0x7a296d + 1: 4608 [ 1: 4608] @ 0xc635c8 0x464379 0xa6318d 0x7feee9 0x5ab69c 0x7b0b26 0x79e81a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe + 23: 753664 [ 23: 753664] @ 0xc635c8 0x42ecc3 0x42e14c 0x5261af 0x526edf 0x5280ab 0x79e80a 0x7a251b 0x7a296d 0xa456e4 0x7f47a54360fe +--- Memory map: --- + source=/home + 00400000-00fcb000: $source/cppbench_server_main + 7f47a4351000-7f47a4352000: /lib/libnss_borg-2.15.so + 7f47a4554000-7f47a4560000: /lib/libnss_files-2.15.so + 7f47a4b73000-7f47a4d21000: /lib/libc-2.15.so + 7f47a4f2b000-7f47a5026000: /lib/libm-2.15.so + 7f47a5227000-7f47a522e000: /lib/librt-2.15.so + 7f47a542f000-7f47a5447000: /lib/libpthread-2.15.so + 7f47a564c000-7f47a564e000: /lib/libdl-2.15.so + 7f47a5850000-7f47a5859000: /lib/libcrypt-2.15.so + 7f47a5a89000-7f47a5aad000: /lib/ld-2.15.so + 7fff63dfe000-7fff63e00000: [vdso] + ffffffffff600000-ffffffffff601000: [vsyscall] + diff --git a/src/profile/testdata/cppbench.heap.string b/src/profile/testdata/cppbench.heap.string new file mode 100644 index 00000000..f1cb6570 --- /dev/null +++ b/src/profile/testdata/cppbench.heap.string @@ -0,0 +1,237 @@ +PeriodType: space bytes +Period: 524288 +Samples: +objects/count space/bytes + 57 528909: 1 2 3 4 5 6 7 8 9 10 11 + bytes:[9216] + 3641 524360: 1 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 17 27 28 29 30 31 32 33 34 35 36 37 38 39 40 + bytes:[144] + 227 3727658: 1 2 3 4 5 6 7 8 9 10 11 + 
bytes:[16384] + 293 525184: 1 41 42 5 6 7 8 9 10 11 + bytes:[1792] + 283 6976735: 1 2 3 4 5 6 7 8 9 10 11 + bytes:[24576] + 293 525184: 1 43 44 45 46 47 48 49 50 51 52 53 54 + bytes:[1792] + 1 2198218: 55 56 57 58 59 60 61 62 7 8 9 10 11 + bytes:[2162688] + 10923 524312: 1 63 64 65 66 67 68 69 70 71 + bytes:[48] + 2 666237: 1 72 73 74 75 76 77 78 79 7 8 9 10 11 + bytes:[262144] + 1638 524448: 1 80 81 82 83 84 4 5 6 7 8 9 10 11 + bytes:[320] + 293 525184: 1 85 86 66 68 87 66 88 89 70 71 + bytes:[1792] + 51 529424: 1 2 3 4 5 6 7 8 9 10 11 + bytes:[10240] + 417 8553514: 1 2 3 4 5 6 7 8 9 10 11 + bytes:[20480] + 3277 524368: 1 90 91 92 93 94 10 11 + bytes:[160] + 64 528394: 1 95 96 97 98 7 8 9 10 11 + bytes:[8192] + 86 1060911: 1 2 3 4 5 6 7 8 9 10 11 + bytes:[12288] + 1 2136279: 55 99 57 58 59 60 61 62 7 8 9 10 11 + bytes:[2097152] + 1170 524512: 1 100 101 102 103 104 105 106 107 75 108 109 110 31 32 33 34 35 36 37 38 39 40 111 112 113 114 7 8 9 10 + bytes:[448] + 625 25616628: 1 2 3 4 5 6 7 8 9 10 11 + bytes:[40960] + 79 527623: 1 2 3 4 5 6 7 8 9 10 11 + bytes:[6656] + 222 5914839: 1 2 3 4 5 6 7 8 9 10 11 + bytes:[26624] + 128 526338: 1 115 116 117 118 119 120 121 122 123 124 125 126 127 71 + bytes:[4096] + 4681 524344: 1 128 84 4 5 6 7 8 9 10 11 + bytes:[112] + 26 534594: 1 129 130 6 7 8 9 10 11 + bytes:[20480] + 10923 524312: 1 131 132 66 68 133 66 88 89 70 71 + bytes:[48] + 64 528394: 1 134 135 136 137 138 98 7 8 9 10 11 + bytes:[8192] + 17 1115476: 1 95 139 140 141 142 143 144 145 146 147 4 5 6 7 8 9 10 11 + bytes:[65536] + 64 528394: 1 2 3 4 5 6 7 8 9 10 11 + bytes:[8192] + 1024 524544: 1 148 149 150 104 151 24 25 26 17 27 28 29 30 31 32 33 34 35 36 37 38 39 40 111 112 113 114 7 8 9 + bytes:[512] + 114 526595: 1 152 153 154 155 156 157 8 9 10 11 + bytes:[4608] + 379 12439381: 1 2 3 4 5 6 7 8 9 10 11 + bytes:[32768] +Locations + 1: 0xc635c8 M=1 + 2: 0x42ecc2 M=1 + 3: 0x42e14b M=1 + 4: 0x5261ae M=1 + 5: 0x526ede M=1 + 6: 0x5280aa M=1 + 7: 0x79e809 M=1 + 8: 0x7a251a M=1 + 9: 0x7a296c M=1 + 10: 0xa456e3 M=1 + 11: 0x7f47a54360fd M=7 + 12: 0xa7479a M=1 + 13: 0xb65e6a M=1 + 14: 0xb65f7f M=1 + 15: 0xa6d068 M=1 + 16: 0xa6dc7f M=1 + 17: 0xbbffe5 M=1 + 18: 0xa5dd83 M=1 + 19: 0xa7b7c5 M=1 + 20: 0xaa88d9 M=1 + 21: 0xaa9db1 M=1 + 22: 0xb59bad M=1 + 23: 0xb0c39b M=1 + 24: 0xb145f2 M=1 + 25: 0xb147c9 M=1 + 26: 0xa5dddc M=1 + 27: 0xa5e836 M=1 + 28: 0xa65f93 M=1 + 29: 0x5aac9d M=1 + 30: 0x535525 M=1 + 31: 0x535143 M=1 + 32: 0x5aa467 M=1 + 33: 0x7e3ce6 M=1 + 34: 0x7d13a1 M=1 + 35: 0x7e0d27 M=1 + 36: 0x6ab44f M=1 + 37: 0x538d26 M=1 + 38: 0x5390e7 M=1 + 39: 0x5391e2 M=1 + 40: 0x4e9602 M=1 + 41: 0x51a271 M=1 + 42: 0x524996 M=1 + 43: 0xac959f M=1 + 44: 0xacdc7b M=1 + 45: 0xace07a M=1 + 46: 0xace1ab M=1 + 47: 0xabd0ff M=1 + 48: 0xabe2a8 M=1 + 49: 0x72f52d M=1 + 50: 0x655375 M=1 + 51: 0x6558d2 M=1 + 52: 0x41c710 M=1 + 53: 0xc25cc5 M=1 + 54: 0x40651a M=1 + 55: 0xc63568 M=1 + 56: 0xbc462d M=1 + 57: 0xbc4bb4 M=1 + 58: 0xbc4ed9 M=1 + 59: 0x4a57b7 M=1 + 60: 0x4b152b M=1 + 61: 0x4ae04b M=1 + 62: 0x4ad224 M=1 + 63: 0x7be149 M=1 + 64: 0x7be674 M=1 + 65: 0x6b312c M=1 + 66: 0xbaa17e M=1 + 67: 0xbaa141 M=1 + 68: 0xbaabc5 M=1 + 69: 0xbb092b M=1 + 70: 0x40bce3 M=1 + 71: 0x7f47a4bab11c M=4 + 72: 0x8168ff M=1 + 73: 0x8149fc M=1 + 74: 0x8139f3 M=1 + 75: 0xbbff76 M=1 + 76: 0x81421b M=1 + 77: 0x4ed413 M=1 + 78: 0x4fd706 M=1 + 79: 0x4de2a1 M=1 + 80: 0x721a58 M=1 + 81: 0x43005d M=1 + 82: 0x7382a3 M=1 + 83: 0x43058f M=1 + 84: 0x435424 M=1 + 85: 0x5413af M=1 + 86: 0x541ab1 M=1 + 87: 0x53507b M=1 + 88: 0xbaa9f8 M=1 + 89: 0xbb0d20 M=1 
+ 90: 0x578704 M=1 + 91: 0x586246 M=1 + 92: 0x592614 M=1 + 93: 0x592744 M=1 + 94: 0x592cb8 M=1 + 95: 0xaaf468 M=1 + 96: 0x52cad6 M=1 + 97: 0x52e89a M=1 + 98: 0x527f31 M=1 + 99: 0xbc463a M=1 + 100: 0xafca3a M=1 + 101: 0xb09b9f M=1 + 102: 0xb09ebf M=1 + 103: 0xb12feb M=1 + 104: 0xb13a91 M=1 + 105: 0xb13c92 M=1 + 106: 0xb13d9c M=1 + 107: 0xa02776 M=1 + 108: 0xa026eb M=1 + 109: 0x5701e1 M=1 + 110: 0x535419 M=1 + 111: 0x4faa16 M=1 + 112: 0x4fc5f5 M=1 + 113: 0x4fd027 M=1 + 114: 0x4fd22f M=1 + 115: 0x75373a M=1 + 116: 0x7eb2d2 M=1 + 117: 0x7ecc86 M=1 + 118: 0x7ece55 M=1 + 119: 0x7ed1cd M=1 + 120: 0x7ed35f M=1 + 121: 0x7edb19 M=1 + 122: 0x7edbb4 M=1 + 123: 0x7d50af M=1 + 124: 0x4b9ba5 M=1 + 125: 0x4b9f61 M=1 + 126: 0x4ba024 M=1 + 127: 0x40bd85 M=1 + 128: 0x430497 M=1 + 129: 0x5a8b91 M=1 + 130: 0x526bfe M=1 + 131: 0x720c2d M=1 + 132: 0x5d35ef M=1 + 133: 0x42f03c M=1 + 134: 0xaaf3e5 M=1 + 135: 0xab0b9f M=1 + 136: 0xab11bd M=1 + 137: 0xab1638 M=1 + 138: 0x52ebdb M=1 + 139: 0xaad4cd M=1 + 140: 0xb66bcc M=1 + 141: 0xb670f1 M=1 + 142: 0xb659b4 M=1 + 143: 0x63689a M=1 + 144: 0x548171 M=1 + 145: 0x520cdb M=1 + 146: 0x521b81 M=1 + 147: 0x5194c8 M=1 + 148: 0xaff129 M=1 + 149: 0xb0b330 M=1 + 150: 0xb12f0f M=1 + 151: 0xb0c442 M=1 + 152: 0x464378 M=1 + 153: 0xa6318c M=1 + 154: 0x7feee8 M=1 + 155: 0x5ab69b M=1 + 156: 0x7b0b25 M=1 + 157: 0x79e819 M=1 +Mappings +1: 0x400000/0xfcb000/0x0 /home/cppbench_server_main +2: 0x7f47a4351000/0x7f47a4352000/0x0 /lib/libnss_borg-2.15.so +3: 0x7f47a4554000/0x7f47a4560000/0x0 /lib/libnss_files-2.15.so +4: 0x7f47a4b73000/0x7f47a4d21000/0x0 /lib/libc-2.15.so +5: 0x7f47a4f2b000/0x7f47a5026000/0x0 /lib/libm-2.15.so +6: 0x7f47a5227000/0x7f47a522e000/0x0 /lib/librt-2.15.so +7: 0x7f47a542f000/0x7f47a5447000/0x0 /lib/libpthread-2.15.so +8: 0x7f47a564c000/0x7f47a564e000/0x0 /lib/libdl-2.15.so +9: 0x7f47a5850000/0x7f47a5859000/0x0 /lib/libcrypt-2.15.so +10: 0x7f47a5a89000/0x7f47a5aad000/0x0 /lib/ld-2.15.so +11: 0x7fff63dfe000/0x7fff63e00000/0x0 [vdso] +12: 0xffffffffff600000/0xffffffffff601000/0x0 [vsyscall] diff --git a/src/profile/testdata/cppbench.svg b/src/profile/testdata/cppbench.svg new file mode 100644 index 00000000..f397c5e0 --- /dev/null +++ b/src/profile/testdata/cppbench.svg @@ -0,0 +1,55 @@ + + + + + + +cppbench_server_main + +cluster_L + + + +File: cppbench_server_main + +File: cppbench_server_main +Type: cpu +0 of 7120000000ns total (0%) +Dropped 56 nodes (cum <= 35600000ns) +Showing top 2 nodes out of 38 (cum >= 7070000000ns) + + +N1 + + +start_thread +0 of 7120000000ns(100%) + + + + +N2 + + +RunWorkerLoop +0 of 7070000000ns(99.30%) + + + + +N1->N2 + + + + + + + 7070000000ns + + + + + diff --git a/src/profile/testdata/cppbench.thread b/src/profile/testdata/cppbench.thread new file mode 100644 index 00000000..0192dd6d --- /dev/null +++ b/src/profile/testdata/cppbench.thread @@ -0,0 +1,29 @@ +--- threadz 1 --- + +--- Thread 7f794ab90940 (name: main/14748) stack: --- + PC: 0x00bc8f1c: helper(arg *) + 0x0040be31: main + 0x7f7949a9811d: __libc_start_main +--- Thread 7f794964e700 (name: thread1/14751) stack: --- + PC: 0x7f794a32bf7d: nanosleep + 0x7f794a32414e: start_thread + creator: 0xa45b96 0xa460b4 0xbaa17f 0xbaa9f9 0xbb0d21 0x40bce4 0x7f7949a9811d +--- Thread 7f794934c700 (name: thread2/14752) stack: --- + PC: 0x00bc8f1c: Wait(int) + 0x7f794a32414e: start_thread + creator: 0xa45b96 0xa48928 0xbaa17f 0xbaa9f9 0xbb0d21 0x40bce4 0x7f7949a9811d +--- Thread 7f7948978700 (name: thread3/14759) stack: --- + [same as previous thread] +--- Memory map: --- + 00400000-00fcb000: 
/home/rsilvera/cppbench/cppbench_server_main + 7f794964f000-7f7949652000: /lib/libnss_cache-2.15.so + 7f7949853000-7f794985f000: /lib/libnss_files-2.15.so + 7f7949a60000-7f7949c0e000: /lib/libc-2.15.so + 7f7949e19000-7f7949f14000: /lib/libm-2.15.so + 7f794a115000-7f794a11c000: /lib/librt-2.15.so + 7f794a31d000-7f794a335000: /lib/libpthread-2.15.so + 7f794a53a000-7f794a53d000: /lib/libdl-2.15.so + 7f794a73e000-7f794a747000: /lib/libcrypt-2.15.so + 7f794a977000-7f794a99b000: /lib/ld-2.15.so + 7fffb8dff000-7fffb8e00000: [vdso] + ffffffffff600000-ffffffffff601000: [vsyscall] diff --git a/src/profile/testdata/cppbench.thread.all b/src/profile/testdata/cppbench.thread.all new file mode 100644 index 00000000..a3f8893e --- /dev/null +++ b/src/profile/testdata/cppbench.thread.all @@ -0,0 +1,33 @@ +--- threadz 1 --- + +--- Thread 7eff063d9940 (name: main/25376) stack: --- + PC: 0x00bc8f1c: helper(arg*) + 0x0040be31: main + 0x7eff052e111d: __libc_start_main +--- Thread 7eff04e97700 (name: thread1/25379) stack: --- + PC: 0x7eff05b74f7d: nanosleep + 0x7eff05b6d14e: start_thread + creator: + 0x0040bce4: main + 0x7eff052e111d: __libc_start_main +--- Thread 7eff04770700 (name: thread2/25382) stack: --- + PC: 0x00bc8f1c: Wait(int) + 0x7eff05b6d14e: start_thread + creator: + 0x0040bd6e: main + 0x7eff052e111d: __libc_start_main +--- Thread 7eff0464d700 (name: thread3/25383) stack: --- + [same as previous thread] +--- Memory map: --- + 00400000-00fcb000: /home/rsilvera/cppbench/cppbench_server_main + 7eff04e98000-7eff04e9b000: /lib/libnss_cache-2.15.so + 7eff0509c000-7eff050a8000: /lib/libnss_files-2.15.so + 7eff052a9000-7eff05457000: /lib/libc-2.15.so + 7eff05662000-7eff0575d000: /lib/libm-2.15.so + 7eff0595e000-7eff05965000: /lib/librt-2.15.so + 7eff05b66000-7eff05b7e000: /lib/libpthread-2.15.so + 7eff05d83000-7eff05d86000: /lib/libdl-2.15.so + 7eff05f87000-7eff05f90000: /lib/libcrypt-2.15.so + 7eff061c0000-7eff061e4000: /lib/ld-2.15.so + 7fff2edff000-7fff2ee00000: [vdso] + ffffffffff600000-ffffffffff601000: [vsyscall] diff --git a/src/profile/testdata/cppbench.thread.all.string b/src/profile/testdata/cppbench.thread.all.string new file mode 100644 index 00000000..c7c0f024 --- /dev/null +++ b/src/profile/testdata/cppbench.thread.all.string @@ -0,0 +1,28 @@ +PeriodType: thread count +Period: 1 +Samples: +thread/count + 1: 1 2 3 + 1: 4 5 6 3 + 2: 1 5 7 3 +Locations + 1: 0xbc8f1c M=1 + 2: 0x40be30 M=1 + 3: 0x7eff052e111c M=4 + 4: 0x7eff05b74f7d M=7 + 5: 0x7eff05b6d14d M=7 + 6: 0x40bce3 M=1 + 7: 0x40bd6d M=1 +Mappings +1: 0x400000/0xfcb000/0x0 /home/rsilvera/cppbench/cppbench_server_main +2: 0x7eff04e98000/0x7eff04e9b000/0x0 /lib/libnss_cache-2.15.so +3: 0x7eff0509c000/0x7eff050a8000/0x0 /lib/libnss_files-2.15.so +4: 0x7eff052a9000/0x7eff05457000/0x0 /lib/libc-2.15.so +5: 0x7eff05662000/0x7eff0575d000/0x0 /lib/libm-2.15.so +6: 0x7eff0595e000/0x7eff05965000/0x0 /lib/librt-2.15.so +7: 0x7eff05b66000/0x7eff05b7e000/0x0 /lib/libpthread-2.15.so +8: 0x7eff05d83000/0x7eff05d86000/0x0 /lib/libdl-2.15.so +9: 0x7eff05f87000/0x7eff05f90000/0x0 /lib/libcrypt-2.15.so +10: 0x7eff061c0000/0x7eff061e4000/0x0 /lib/ld-2.15.so +11: 0x7fff2edff000/0x7fff2ee00000/0x0 [vdso] +12: 0xffffffffff600000/0xffffffffff601000/0x0 [vsyscall] diff --git a/src/profile/testdata/cppbench.thread.none b/src/profile/testdata/cppbench.thread.none new file mode 100644 index 00000000..6ab24214 --- /dev/null +++ b/src/profile/testdata/cppbench.thread.none @@ -0,0 +1,27 @@ +--- threadz 1 --- + +--- Thread 7eff063d9940 (name: main/25376) stack: --- + PC: 
0xbc8f1c 0xbcae55 0xbcb5f5 0x40b688 0x4d5f51 0x40be31 0x7eff052e111d +--- Thread 7eff04b95700 (name: thread1/25380) stack: --- + PC: 0xbc8f1c 0xbcbd00 0xa47f60 0xa456e4 0x7eff05b6d14e + creator: 0xa45b96 0xa48928 0xbaa17f 0xbaa9f9 0xbb0d21 0x40bce4 0x7eff052e111d +--- Thread 7eff04893700 (name: thread2/25381) stack: --- + PC: 0x7eff052dfa93 0x7a1956 0x7a1c45 0x7a2727 0x7a296d 0xa456e4 + 0x7eff05b6d14e + creator: 0xa45b96 0x7a37d2 0x7a3e8d 0xbbff77 0x79ec1c 0x40bd6e 0x7eff052e111d +--- Thread 7eff04770700 (name: thread3/25382) stack: --- + PC: 0xbc8f1c 0x7a2691 0x7a296d 0xa456e4 0x7eff05b6d14e + creator: 0xa45b96 0x7a37d2 0x7a3e8d 0xbbff77 0x79ec1c 0x40bd6e 0x7eff052e111d +--- Memory map: --- + 00400000-00fcb000: /home/rsilvera/cppbench/cppbench_server_main.unstripped + 7eff04e98000-7eff04e9b000: /lib/libnss_cache-2.15.so + 7eff0509c000-7eff050a8000: /lib/libnss_files-2.15.so + 7eff052a9000-7eff05457000: /lib/libc-2.15.so + 7eff05662000-7eff0575d000: /lib/libm-2.15.so + 7eff0595e000-7eff05965000: /lib/librt-2.15.so + 7eff05b66000-7eff05b7e000: /lib/libpthread-2.15.so + 7eff05d83000-7eff05d86000: /lib/libdl-2.15.so + 7eff05f87000-7eff05f90000: /lib/libcrypt-2.15.so + 7eff061c0000-7eff061e4000: /lib/ld-2.15.so + 7fff2edff000-7fff2ee00000: [vdso] + ffffffffff600000-ffffffffff601000: [vsyscall] diff --git a/src/profile/testdata/cppbench.thread.none.string b/src/profile/testdata/cppbench.thread.none.string new file mode 100644 index 00000000..af0ad3cb --- /dev/null +++ b/src/profile/testdata/cppbench.thread.none.string @@ -0,0 +1,50 @@ +PeriodType: thread count +Period: 1 +Samples: +thread/count + 1: 1 2 3 4 5 6 7 + 1: 1 8 9 10 11 12 13 14 15 16 17 7 + 1: 18 19 20 21 22 10 11 12 23 24 25 26 27 7 + 1: 1 28 22 10 11 12 23 24 25 26 27 7 +Locations + 1: 0xbc8f1c M=1 + 2: 0xbcae54 M=1 + 3: 0xbcb5f4 M=1 + 4: 0x40b687 M=1 + 5: 0x4d5f50 M=1 + 6: 0x40be30 M=1 + 7: 0x7eff052e111c M=4 + 8: 0xbcbcff M=1 + 9: 0xa47f5f M=1 + 10: 0xa456e3 M=1 + 11: 0x7eff05b6d14d M=7 + 12: 0xa45b95 M=1 + 13: 0xa48927 M=1 + 14: 0xbaa17e M=1 + 15: 0xbaa9f8 M=1 + 16: 0xbb0d20 M=1 + 17: 0x40bce3 M=1 + 18: 0x7eff052dfa93 M=4 + 19: 0x7a1955 M=1 + 20: 0x7a1c44 M=1 + 21: 0x7a2726 M=1 + 22: 0x7a296c M=1 + 23: 0x7a37d1 M=1 + 24: 0x7a3e8c M=1 + 25: 0xbbff76 M=1 + 26: 0x79ec1b M=1 + 27: 0x40bd6d M=1 + 28: 0x7a2690 M=1 +Mappings +1: 0x400000/0xfcb000/0x0 /home/rsilvera/cppbench/cppbench_server_main.unstripped +2: 0x7eff04e98000/0x7eff04e9b000/0x0 /lib/libnss_cache-2.15.so +3: 0x7eff0509c000/0x7eff050a8000/0x0 /lib/libnss_files-2.15.so +4: 0x7eff052a9000/0x7eff05457000/0x0 /lib/libc-2.15.so +5: 0x7eff05662000/0x7eff0575d000/0x0 /lib/libm-2.15.so +6: 0x7eff0595e000/0x7eff05965000/0x0 /lib/librt-2.15.so +7: 0x7eff05b66000/0x7eff05b7e000/0x0 /lib/libpthread-2.15.so +8: 0x7eff05d83000/0x7eff05d86000/0x0 /lib/libdl-2.15.so +9: 0x7eff05f87000/0x7eff05f90000/0x0 /lib/libcrypt-2.15.so +10: 0x7eff061c0000/0x7eff061e4000/0x0 /lib/ld-2.15.so +11: 0x7fff2edff000/0x7fff2ee00000/0x0 [vdso] +12: 0xffffffffff600000/0xffffffffff601000/0x0 [vsyscall] diff --git a/src/profile/testdata/cppbench.thread.string b/src/profile/testdata/cppbench.thread.string new file mode 100644 index 00000000..bf3f0f36 --- /dev/null +++ b/src/profile/testdata/cppbench.thread.string @@ -0,0 +1,33 @@ +PeriodType: thread count +Period: 1 +Samples: +thread/count + 1: 1 2 3 + 1: 4 5 6 7 8 9 10 11 3 + 2: 1 5 6 12 8 9 10 11 3 +Locations + 1: 0xbc8f1c M=1 + 2: 0x40be30 M=1 + 3: 0x7f7949a9811c M=4 + 4: 0x7f794a32bf7d M=7 + 5: 0x7f794a32414d M=7 + 6: 0xa45b95 M=1 + 7: 0xa460b3 M=1 + 8: 
0xbaa17e M=1 + 9: 0xbaa9f8 M=1 + 10: 0xbb0d20 M=1 + 11: 0x40bce3 M=1 + 12: 0xa48927 M=1 +Mappings +1: 0x400000/0xfcb000/0x0 /home/rsilvera/cppbench/cppbench_server_main +2: 0x7f794964f000/0x7f7949652000/0x0 /lib/libnss_cache-2.15.so +3: 0x7f7949853000/0x7f794985f000/0x0 /lib/libnss_files-2.15.so +4: 0x7f7949a60000/0x7f7949c0e000/0x0 /lib/libc-2.15.so +5: 0x7f7949e19000/0x7f7949f14000/0x0 /lib/libm-2.15.so +6: 0x7f794a115000/0x7f794a11c000/0x0 /lib/librt-2.15.so +7: 0x7f794a31d000/0x7f794a335000/0x0 /lib/libpthread-2.15.so +8: 0x7f794a53a000/0x7f794a53d000/0x0 /lib/libdl-2.15.so +9: 0x7f794a73e000/0x7f794a747000/0x0 /lib/libcrypt-2.15.so +10: 0x7f794a977000/0x7f794a99b000/0x0 /lib/ld-2.15.so +11: 0x7fffb8dff000/0x7fffb8e00000/0x0 [vdso] +12: 0xffffffffff600000/0xffffffffff601000/0x0 [vsyscall] diff --git a/src/profile/testdata/go.crc32.cpu b/src/profile/testdata/go.crc32.cpu new file mode 100644 index 00000000..ce08313d Binary files /dev/null and b/src/profile/testdata/go.crc32.cpu differ diff --git a/src/profile/testdata/go.crc32.cpu.string b/src/profile/testdata/go.crc32.cpu.string new file mode 100644 index 00000000..c2838b8c --- /dev/null +++ b/src/profile/testdata/go.crc32.cpu.string @@ -0,0 +1,87 @@ +PeriodType: cpu nanoseconds +Period: 10000000 +Samples: +samples/count cpu/nanoseconds + 1 10000000: 1 2 3 4 5 + 2 20000000: 6 2 3 4 5 + 1 10000000: 1 2 3 4 5 + 1 10000000: 7 2 3 4 5 + 2 20000000: 8 2 3 4 5 + 1 10000000: 7 2 3 4 5 + 1 10000000: 6 2 3 4 5 + 1 10000000: 7 2 3 4 5 + 1 10000000: 8 2 3 4 5 + 1 10000000: 6 2 3 4 5 + 1 10000000: 1 2 3 4 5 + 1 10000000: 8 2 3 4 5 + 1 10000000: 6 2 3 4 5 + 1 10000000: 7 2 3 4 5 + 1 10000000: 8 2 3 4 5 + 4 40000000: 7 2 3 4 5 + 1 10000000: 8 2 3 4 5 + 1 10000000: 7 2 3 4 5 + 2 20000000: 6 2 3 4 5 + 1 10000000: 8 2 3 4 5 + 1 10000000: 1 2 3 4 5 + 1 10000000: 7 2 3 4 5 + 1 10000000: 8 2 3 4 5 + 1 10000000: 1 2 3 4 5 + 1 10000000: 7 2 3 4 5 + 2 20000000: 6 2 3 4 5 + 1 10000000: 7 2 3 4 5 + 1 10000000: 6 2 3 4 5 + 1 10000000: 1 2 3 4 5 + 2 20000000: 8 2 3 4 5 + 1 10000000: 7 2 3 4 5 + 1 10000000: 8 2 3 4 5 + 1 10000000: 1 2 3 4 5 + 1 10000000: 7 2 3 4 5 + 1 10000000: 1 2 3 4 5 + 1 10000000: 6 2 3 4 5 + 1 10000000: 8 2 3 4 5 + 1 10000000: 1 2 3 4 5 + 1 10000000: 8 2 3 4 5 + 1 10000000: 7 2 3 4 5 + 1 10000000: 6 2 3 4 5 + 2 20000000: 1 2 3 4 5 + 1 10000000: 7 2 3 4 5 + 1 10000000: 8 2 3 4 5 + 3 30000000: 7 2 3 4 5 + 1 10000000: 1 2 3 4 5 + 1 10000000: 8 2 3 4 5 + 2 20000000: 1 2 3 4 5 + 2 20000000: 7 2 3 4 5 + 1 10000000: 8 2 3 4 5 + 1 10000000: 7 2 3 4 5 + 1 10000000: 6 2 3 4 5 + 2 20000000: 7 2 3 4 5 + 1 10000000: 6 2 3 4 5 + 1 10000000: 1 2 3 4 5 + 1 10000000: 7 2 3 4 5 + 2 20000000: 6 2 3 4 5 + 1 10000000: 1 2 3 4 5 + 1 10000000: 6 2 3 4 5 + 1 10000000: 8 2 3 4 5 + 1 10000000: 6 2 3 4 5 + 1 10000000: 8 2 3 4 5 + 1 10000000: 6 2 3 4 5 + 1 10000000: 8 2 3 4 5 + 1 10000000: 1 2 3 4 5 + 85 850000000: 9 2 3 4 5 + 21 210000000: 10 2 3 4 5 + 1 10000000: 7 2 3 4 5 + 24 240000000: 11 2 3 4 5 +Locations + 1: 0x430b93 M=1 + 2: 0x4317eb M=1 + 3: 0x42a065 M=1 + 4: 0x42a31b M=1 + 5: 0x415d0f M=1 + 6: 0x430baa M=1 + 7: 0x430bb5 M=1 + 8: 0x430ba6 M=1 + 9: 0x430bac M=1 + 10: 0x430b9f M=1 + 11: 0x430bb3 M=1 +Mappings +1: 0x0/0xffffffffffffffff/0x0 diff --git a/src/profile/testdata/go.godoc.thread b/src/profile/testdata/go.godoc.thread new file mode 100644 index 00000000..1c8582b5 --- /dev/null +++ b/src/profile/testdata/go.godoc.thread @@ -0,0 +1,8 @@ +threadcreate profile: total 7 +1 @ 0x44cb3 0x45045 0x45323 0x45534 0x47e9c 0x47c98 0x44ba2 0x2720fe 0x271fb5 +1 @ 
0x44cb3 0x45045 0x45323 0x45534 0x46716 0x51584 0x461e0 +1 @ 0x44cb3 0x45045 0x45323 0x45547 0x46716 0x40963 0x461e0 +1 @ 0x44cb3 0x45045 0x45323 0x45547 0x4562e 0x460ed 0x51a59 +1 @ 0x44cb3 0x45045 0x441ae 0x461e0 +1 @ 0x44cb3 0x44e04 0x44b80 0x5192d +1 @ 0x440e2 0x5191a diff --git a/src/profile/testdata/go.godoc.thread.string b/src/profile/testdata/go.godoc.thread.string new file mode 100644 index 00000000..47b63aa9 --- /dev/null +++ b/src/profile/testdata/go.godoc.thread.string @@ -0,0 +1,37 @@ +PeriodType: threadcreate count +Period: 1 +Samples: +threadcreate/count + 1: 1 2 3 4 5 6 7 8 9 + 1: 1 2 3 4 10 11 12 + 1: 1 2 3 13 10 14 12 + 1: 1 2 3 13 15 16 17 + 1: 1 2 18 12 + 1: 1 19 20 21 + 1: 22 23 +Locations + 1: 0x44cb3 M=1 + 2: 0x45044 M=1 + 3: 0x45322 M=1 + 4: 0x45533 M=1 + 5: 0x47e9b M=1 + 6: 0x47c97 M=1 + 7: 0x44ba1 M=1 + 8: 0x2720fd M=1 + 9: 0x271fb4 M=1 + 10: 0x46715 M=1 + 11: 0x51583 M=1 + 12: 0x461df M=1 + 13: 0x45546 M=1 + 14: 0x40962 M=1 + 15: 0x4562d M=1 + 16: 0x460ec M=1 + 17: 0x51a58 M=1 + 18: 0x441ad M=1 + 19: 0x44e03 M=1 + 20: 0x44b7f M=1 + 21: 0x5192c M=1 + 22: 0x440e2 M=1 + 23: 0x51919 M=1 +Mappings +1: 0x0/0xffffffffffffffff/0x0 diff --git a/src/profile/testdata/gobench.cpu b/src/profile/testdata/gobench.cpu new file mode 100644 index 00000000..e921d21a Binary files /dev/null and b/src/profile/testdata/gobench.cpu differ diff --git a/src/profile/testdata/gobench.cpu.string b/src/profile/testdata/gobench.cpu.string new file mode 100644 index 00000000..7df1533a --- /dev/null +++ b/src/profile/testdata/gobench.cpu.string @@ -0,0 +1,415 @@ +PeriodType: cpu nanoseconds +Period: 10000000 +Samples: +samples/count cpu/nanoseconds + 1 10000000: 1 2 + 1 10000000: 3 2 + 1 10000000: 4 2 + 1 10000000: 5 2 + 1 10000000: 6 2 + 1 10000000: 7 2 + 1 10000000: 8 2 + 1 10000000: 9 2 + 1 10000000: 10 2 + 1 10000000: 11 2 + 1 10000000: 12 2 + 1 10000000: 13 2 + 1 10000000: 14 2 + 1 10000000: 15 2 + 1 10000000: 16 2 + 1 10000000: 17 2 + 1 10000000: 18 2 + 1 10000000: 16 2 + 1 10000000: 19 2 + 1 10000000: 20 2 + 1 10000000: 21 2 + 1 10000000: 22 2 + 1 10000000: 23 2 + 1 10000000: 24 2 + 1 10000000: 25 2 + 1 10000000: 15 2 + 1 10000000: 26 2 + 1 10000000: 9 2 + 1 10000000: 27 2 + 1 10000000: 28 2 + 1 10000000: 29 2 + 1 10000000: 30 2 + 1 10000000: 31 2 + 1 10000000: 32 2 + 1 10000000: 24 2 + 1 10000000: 30 2 + 1 10000000: 33 2 + 1 10000000: 34 2 + 1 10000000: 35 2 + 1 10000000: 36 2 + 1 10000000: 27 2 + 1 10000000: 37 2 + 1 10000000: 38 2 + 1 10000000: 19 2 + 1 10000000: 39 2 + 1 10000000: 40 2 + 1 10000000: 41 2 + 1 10000000: 16 2 + 1 10000000: 42 2 + 1 10000000: 43 2 + 1 10000000: 44 2 + 1 10000000: 45 2 + 1 10000000: 46 2 + 1 10000000: 47 2 + 1 10000000: 48 2 + 1 10000000: 40 2 + 1 10000000: 10 2 + 1 10000000: 49 2 + 1 10000000: 50 2 + 1 10000000: 51 2 + 1 10000000: 52 2 + 1 10000000: 53 2 + 1 10000000: 30 2 + 1 10000000: 54 2 + 1 10000000: 55 2 + 1 10000000: 36 2 + 1 10000000: 56 2 + 1 10000000: 57 2 + 1 10000000: 58 2 + 1 10000000: 59 2 + 1 10000000: 60 2 + 1 10000000: 61 2 + 1 10000000: 57 2 + 1 10000000: 62 2 + 1 10000000: 63 2 + 1 10000000: 30 2 + 1 10000000: 64 2 + 1 10000000: 16 2 + 1 10000000: 65 2 + 1 10000000: 26 2 + 1 10000000: 40 2 + 1 10000000: 66 2 + 1 10000000: 58 2 + 1 10000000: 67 2 + 1 10000000: 68 2 + 1 10000000: 69 2 + 1 10000000: 70 2 + 1 10000000: 71 2 + 1 10000000: 72 2 + 1 10000000: 51 2 + 1 10000000: 73 2 + 1 10000000: 74 2 + 1 10000000: 75 2 + 1 10000000: 76 2 + 1 10000000: 77 2 + 1 10000000: 78 2 + 1 10000000: 79 2 + 1 10000000: 80 2 + 1 10000000: 81 2 + 1 
10000000: 82 2 + 1 10000000: 83 2 + 1 10000000: 84 2 + 1 10000000: 85 2 + 1 10000000: 86 2 + 1 10000000: 10 2 + 1 10000000: 87 2 + 1 10000000: 88 2 + 1 10000000: 89 2 + 1 10000000: 90 2 + 1 10000000: 63 2 + 1 10000000: 91 2 + 1 10000000: 5 2 + 1 10000000: 92 2 + 1 10000000: 93 2 + 1 10000000: 94 2 + 1 10000000: 19 2 + 1 10000000: 95 2 + 1 10000000: 30 2 + 1 10000000: 96 2 + 1 10000000: 10 2 + 1 10000000: 97 2 + 1 10000000: 98 2 + 1 10000000: 99 2 + 1 10000000: 62 2 + 1 10000000: 92 2 + 1 10000000: 100 2 + 1 10000000: 101 2 + 1 10000000: 39 2 + 1 10000000: 102 2 + 1 10000000: 86 2 + 1 10000000: 33 2 + 1 10000000: 103 2 + 1 10000000: 104 2 + 1 10000000: 13 2 + 2 20000000: 105 2 + 1 10000000: 106 2 + 1 10000000: 52 2 + 1 10000000: 24 2 + 1 10000000: 107 2 + 1 10000000: 108 2 + 1 10000000: 52 2 + 1 10000000: 109 2 + 1 10000000: 5 2 + 1 10000000: 82 2 + 1 10000000: 8 2 + 1 10000000: 110 2 + 1 10000000: 111 2 + 1 10000000: 112 2 + 1 10000000: 113 2 + 1 10000000: 114 2 + 1 10000000: 115 2 + 1 10000000: 116 2 + 1 10000000: 19 2 + 1 10000000: 64 2 + 1 10000000: 106 2 + 1 10000000: 117 2 + 1 10000000: 30 2 + 1 10000000: 118 2 + 1 10000000: 86 2 + 1 10000000: 119 2 + 1 10000000: 120 2 + 1 10000000: 121 2 + 1 10000000: 81 2 + 2 20000000: 10 2 + 1 10000000: 19 2 + 1 10000000: 122 2 + 1 10000000: 123 2 + 1 10000000: 105 2 + 1 10000000: 124 2 + 1 10000000: 125 2 + 1 10000000: 46 2 + 1 10000000: 8 2 + 10 100000000: 21 2 + 7 70000000: 126 2 + 3 30000000: 9 2 + 1 10000000: 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 + 1 10000000: 144 2 + 5 50000000: 145 2 + 25 250000000: 146 2 + 1 10000000: 147 2 + 1 10000000: 148 149 150 134 135 136 137 138 139 140 141 142 143 + 1 10000000: 151 152 153 154 155 135 136 137 138 139 140 141 142 143 + 1 10000000: 156 157 153 154 155 135 136 137 138 139 140 141 142 143 + 1 10000000: 158 159 132 133 134 135 136 137 138 139 140 141 142 143 + 4 40000000: 27 2 + 4 40000000: 160 2 + 1 10000000: 116 2 + 5 50000000: 161 2 + 20 200000000: 162 163 164 135 136 137 138 139 140 141 142 143 + 1 10000000: 165 166 167 164 135 136 137 138 139 140 141 142 143 + 1 10000000: 168 169 167 164 135 136 137 138 139 140 141 142 143 + 2 20000000: 170 171 172 142 143 + 2 20000000: 173 171 172 142 143 + 1 10000000: 105 174 175 154 155 176 177 140 141 142 143 + 1 10000000: 178 179 176 177 140 141 142 143 + 1 10000000: 180 181 182 181 183 184 185 186 187 188 189 190 191 192 193 194 143 + 7 70000000: 195 2 + 2 20000000: 196 2 + 8 80000000: 16 2 + 1 10000000: 197 2 + 1 10000000: 146 198 199 135 136 137 138 139 140 141 142 143 + 1 10000000: 200 199 135 136 137 138 139 140 141 142 143 + 3 30000000: 162 179 135 136 137 138 139 140 141 142 143 + 1 10000000: 201 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 + 1 10000000: 202 167 152 153 154 155 135 136 137 138 139 140 141 142 143 + 6 60000000: 162 163 152 153 154 155 135 136 137 138 139 140 141 142 143 +Locations + 1: 0x410bc0 M=1 + 2: 0x41a770 M=1 + 3: 0x410b4b M=1 + 4: 0x40f534 M=1 + 5: 0x40f018 M=1 + 6: 0x421f4f M=1 + 7: 0x40e46f M=1 + 8: 0x40f0e3 M=1 + 9: 0x4286c7 M=1 + 10: 0x40f15b M=1 + 11: 0x40efb1 M=1 + 12: 0x41250d M=1 + 13: 0x427854 M=1 + 14: 0x40e688 M=1 + 15: 0x410b61 M=1 + 16: 0x40fa72 M=1 + 17: 0x40e92a M=1 + 18: 0x421ff1 M=1 + 19: 0x42830d M=1 + 20: 0x41cf23 M=1 + 21: 0x40e7cb M=1 + 22: 0x40ea46 M=1 + 23: 0x40f792 M=1 + 24: 0x40f023 M=1 + 25: 0x40ee50 M=1 + 26: 0x40c6ab M=1 + 27: 0x40fa51 M=1 + 28: 0x40f14b M=1 + 29: 0x421fca M=1 + 30: 0x4285d3 M=1 + 31: 0x410ba9 M=1 + 32: 0x40e75f M=1 + 33: 0x4277a1 M=1 + 
34: 0x40e89f M=1 + 35: 0x40ea54 M=1 + 36: 0x40f0ab M=1 + 37: 0x40ef9b M=1 + 38: 0x410d6a M=1 + 39: 0x40e455 M=1 + 40: 0x427856 M=1 + 41: 0x40e80b M=1 + 42: 0x40f5ef M=1 + 43: 0x40fb2a M=1 + 44: 0x422786 M=1 + 45: 0x40f031 M=1 + 46: 0x40f49d M=1 + 47: 0x40f331 M=1 + 48: 0x40e927 M=1 + 49: 0x40f558 M=1 + 50: 0x410b56 M=1 + 51: 0x40eac1 M=1 + 52: 0x40e813 M=1 + 53: 0x40e7df M=1 + 54: 0x40f53d M=1 + 55: 0x40f180 M=1 + 56: 0x410b94 M=1 + 57: 0x40fbf6 M=1 + 58: 0x40f026 M=1 + 59: 0x40f0dc M=1 + 60: 0x40e9d3 M=1 + 61: 0x40fa7b M=1 + 62: 0x40e877 M=1 + 63: 0x4048a8 M=1 + 64: 0x40f02e M=1 + 65: 0x4048b8 M=1 + 66: 0x4277d0 M=1 + 67: 0x40f5cb M=1 + 68: 0x40fbae M=1 + 69: 0x40e8c2 M=1 + 70: 0x40f64b M=1 + 71: 0x40e82e M=1 + 72: 0x421f22 M=1 + 73: 0x40fa67 M=1 + 74: 0x40fbb1 M=1 + 75: 0x40f568 M=1 + 76: 0x40e461 M=1 + 77: 0x40ef85 M=1 + 78: 0x40f58b M=1 + 79: 0x40f08d M=1 + 80: 0x40e75c M=1 + 81: 0x410c22 M=1 + 82: 0x40fa59 M=1 + 83: 0x40f091 M=1 + 84: 0x40eb69 M=1 + 85: 0x41075a M=1 + 86: 0x40e7e9 M=1 + 87: 0x40fa97 M=1 + 88: 0x4131eb M=1 + 89: 0x40f769 M=1 + 90: 0x40f54e M=1 + 91: 0x4277d5 M=1 + 92: 0x40f0ca M=1 + 93: 0x40f051 M=1 + 94: 0x40e94f M=1 + 95: 0x40fc11 M=1 + 96: 0x41815b M=1 + 97: 0x40f4b3 M=1 + 98: 0x421fe8 M=1 + 99: 0x40e79e M=1 + 100: 0x413f29 M=1 + 101: 0x427822 M=1 + 102: 0x40ef3d M=1 + 103: 0x40e440 M=1 + 104: 0x40e767 M=1 + 105: 0x42783b M=1 + 106: 0x40fa85 M=1 + 107: 0x40fb36 M=1 + 108: 0x410bae M=1 + 109: 0x40f0d7 M=1 + 110: 0x410ba4 M=1 + 111: 0x40e87b M=1 + 112: 0x40e7c0 M=1 + 113: 0x40eae0 M=1 + 114: 0x410a99 M=1 + 115: 0x40e7bd M=1 + 116: 0x40f09d M=1 + 117: 0x410b70 M=1 + 118: 0x40f32d M=1 + 119: 0x4283ec M=1 + 120: 0x40f010 M=1 + 121: 0x40e97a M=1 + 122: 0x40f19a M=1 + 123: 0x40e779 M=1 + 124: 0x40f61d M=1 + 125: 0x40f4e1 M=1 + 126: 0x40f58f M=1 + 127: 0x41ef43 M=1 + 128: 0x41ef96 M=1 + 129: 0x41f089 M=1 + 130: 0x41f360 M=1 + 131: 0x41fc8e M=1 + 132: 0x4204c7 M=1 + 133: 0x422b03 M=1 + 134: 0x420cee M=1 + 135: 0x422150 M=1 + 136: 0x4221d9 M=1 + 137: 0x41dc0c M=1 + 138: 0x41db47 M=1 + 139: 0x672125 M=1 + 140: 0x4ac6fd M=1 + 141: 0x4abf98 M=1 + 142: 0x491fbd M=1 + 143: 0x41931f M=1 + 144: 0x40e844 M=1 + 145: 0x421ff8 M=1 + 146: 0x4277e4 M=1 + 147: 0x40e990 M=1 + 148: 0x41c53f M=1 + 149: 0x422746 M=1 + 150: 0x422b42 M=1 + 151: 0x412b5f M=1 + 152: 0x40d47b M=1 + 153: 0x40cf5e M=1 + 154: 0x40cceb M=1 + 155: 0x420b5e M=1 + 156: 0x413ab9 M=1 + 157: 0x40d56e M=1 + 158: 0x41f5a6 M=1 + 159: 0x420149 M=1 + 160: 0x40f531 M=1 + 161: 0x410b8d M=1 + 162: 0x427ac9 M=1 + 163: 0x412b91 M=1 + 164: 0x420ee3 M=1 + 165: 0x4134a8 M=1 + 166: 0x412dc7 M=1 + 167: 0x412afa M=1 + 168: 0x413a9d M=1 + 169: 0x412bf6 M=1 + 170: 0x671ed3 M=1 + 171: 0x4ac6ad M=1 + 172: 0x4abdd8 M=1 + 173: 0x671ebe M=1 + 174: 0x40c8ae M=1 + 175: 0x40d00a M=1 + 176: 0x422081 M=1 + 177: 0x672148 M=1 + 178: 0x427ad1 M=1 + 179: 0x420e54 M=1 + 180: 0x5718ff M=1 + 181: 0x575ab6 M=1 + 182: 0x572114 M=1 + 183: 0x571257 M=1 + 184: 0x462494 M=1 + 185: 0x475ea6 M=1 + 186: 0x473682 M=1 + 187: 0x471fd7 M=1 + 188: 0x471ac0 M=1 + 189: 0x46f1b2 M=1 + 190: 0x46ef32 M=1 + 191: 0x4ab9e0 M=1 + 192: 0x4acce1 M=1 + 193: 0x4ac7b6 M=1 + 194: 0x4ace6a M=1 + 195: 0x410b8a M=1 + 196: 0x40f56e M=1 + 197: 0x428176 M=1 + 198: 0x4120f3 M=1 + 199: 0x420be8 M=1 + 200: 0x412100 M=1 + 201: 0x41ef39 M=1 + 202: 0x412e38 M=1 +Mappings +1: 0x0/0xffffffffffffffff/0x0 diff --git a/src/profile/testdata/gobench.heap b/src/profile/testdata/gobench.heap new file mode 100644 index 00000000..ed449034 --- /dev/null +++ b/src/profile/testdata/gobench.heap @@ -0,0 +1,16 @@ 
+heap profile: 13: 1595680 [47130736: 2584596557304] @ heap/1048576 +1: 524288 [3: 1572864] @ 0x420cef 0x422151 0x4221da 0x41dc0d 0x41db48 0x74920f 0x6295ac 0x629855 0x462769 0x419320 +1: 524288 [1: 524288] @ 0x420cef 0x422151 0x4221da 0x41dc0d 0x41db48 0x74920f 0x63963f 0x419320 +1: 262144 [1: 262144] @ 0x420cef 0x422151 0x4221da 0x41dc0d 0x41db48 0x451a39 0x451ba5 0x450683 0x450077 0x4525a4 0x58e034 0x419320 +1: 262144 [1: 262144] @ 0x420cef 0x422151 0x4221da 0x41dc0d 0x41db48 0x451a39 0x451ba5 0x450683 0x450077 0x4524d4 0x401090 0x4011a1 0x416dff 0x419320 +1: 10240 [642: 6574080] @ 0x420cef 0x422151 0x4221da 0x41dc0d 0x41db48 0x477637 0x47718b 0x477056 0x4799b2 0x46bfd7 0x419320 +1: 4096 [1: 4096] @ 0x420cef 0x422151 0x4221da 0x41dc0d 0x41db48 0x526126 0x5261ea 0x4683d4 0x467e09 0x419320 +1: 4096 [1: 4096] @ 0x420cef 0x422151 0x4221da 0x41dc0d 0x41db48 0x53fbf3 0x53f85f 0x545f52 0x545a70 0x419320 +1: 2048 [1: 2048] @ 0x420cef 0x420fa9 0x414b22 0x414d20 0x4901be 0x419320 +1: 1280 [1: 1280] @ 0x420cef 0x422082 0x48dbe3 0x48d15c 0x48cdd0 0x4a9dc0 0x545bfe 0x543ac7 0x419320 +1: 384 [1: 384] @ 0x420cef 0x422151 0x4221da 0x41dc0d 0x41dd68 0x41dcbd 0x429150 0x429add 0x42e013 0x4307e2 0x4366ff 0x42c1c2 0x653e4d 0x64bdc5 0x64c359 0x65a73d 0x64cdb1 0x64be73 0x64c359 0x64c59a 0x64c205 0x64c359 0x64b778 0x5cd55c 0x45dbc3 0x543e70 0x559166 0x55ba54 0x559691 0x559985 0x5a19ff 0x543e70 +1: 288 [1: 288] @ 0x420cef 0x420fa9 0x419e19 0x41a1a8 0x419f63 0x48f09f 0x48d991 0x48cdd0 0x4a9dc0 0x545bfe 0x543ac7 0x419320 +1: 288 [2: 296] @ +1: 96 [1: 96] @ 0x420cef 0x424f35 0x4255d1 0x6fc293 0x6f9c88 0x6f9944 0x6f96be 0x6f966b 0x59f39a 0x468318 0x467e09 0x419320 +0: 0 [1: 1024] @ 0x420cef 0x422151 0x4221da 0x41dc0d 0x41dd68 0x41dcbd 0x6d71a3 0x6da87d 0x7b2c3b 0x419320 +0: 0 [1: 16] @ 0x420cef 0x422048 0x40b517 0x40b746 0x6d9ca2 0x4761c5 0x475ea7 0x46fc4f 0x46f180 0x46ef33 0x4ab821 0x4acc32 0x4ac7b7 0x4ace36 0x419320 diff --git a/src/profile/testdata/gobench.heap.string b/src/profile/testdata/gobench.heap.string new file mode 100644 index 00000000..b5adad98 --- /dev/null +++ b/src/profile/testdata/gobench.heap.string @@ -0,0 +1,137 @@ +PeriodType: space bytes +Period: 524288 +Samples: +alloc_objects/count alloc_space/bytes inuse_objects/count inuse_space/bytes + 4 2488234 1 829411: 1 2 3 4 5 6 7 8 9 10 + bytes:[524288] + 1 829411 1 829411: 1 2 3 4 5 6 11 10 + bytes:[524288] + 2 666237 2 666237: 1 2 3 4 5 12 13 14 15 16 17 10 + bytes:[262144] + 2 666237 2 666237: 1 2 3 4 5 12 13 14 15 18 19 20 21 10 + bytes:[262144] + 33192 339890635 51 529424: 1 2 3 4 5 22 23 24 25 26 10 + bytes:[10240] + 128 526338 128 526338: 1 2 3 4 5 27 28 29 30 10 + bytes:[4096] + 128 526338 128 526338: 1 2 3 4 5 31 32 33 34 10 + bytes:[4096] + 256 525312 256 525312: 1 35 36 37 38 10 + bytes:[2048] + 410 524928 410 524928: 1 39 40 41 42 43 44 45 10 + bytes:[1280] + 1365 524480 1365 524480: 1 2 3 4 46 47 48 49 50 51 52 53 54 55 56 57 58 59 56 60 61 56 62 63 64 65 66 67 68 69 70 65 + bytes:[384] + 1820 524432 1820 524432: 1 35 71 72 73 74 75 42 43 44 45 10 + bytes:[288] + 7085 1048724 1820 524432: + bytes:[288] + 5461 524336 5461 524336: 1 76 77 78 79 80 81 82 83 84 30 10 + bytes:[96] + 512 524800 0 0: 1 2 3 4 46 47 85 86 87 10 + bytes:[1024] + 32768 524296 0 0: 1 88 89 90 91 92 93 94 95 96 97 98 99 100 10 + bytes:[16] +Locations + 1: 0x420cef M=1 + 2: 0x422150 M=1 + 3: 0x4221d9 M=1 + 4: 0x41dc0c M=1 + 5: 0x41db47 M=1 + 6: 0x74920e M=1 + 7: 0x6295ab M=1 + 8: 0x629854 M=1 + 9: 0x462768 M=1 + 10: 0x41931f M=1 + 11: 0x63963e M=1 + 12: 0x451a38 
M=1 + 13: 0x451ba4 M=1 + 14: 0x450682 M=1 + 15: 0x450076 M=1 + 16: 0x4525a3 M=1 + 17: 0x58e033 M=1 + 18: 0x4524d3 M=1 + 19: 0x40108f M=1 + 20: 0x4011a0 M=1 + 21: 0x416dfe M=1 + 22: 0x477636 M=1 + 23: 0x47718a M=1 + 24: 0x477055 M=1 + 25: 0x4799b1 M=1 + 26: 0x46bfd6 M=1 + 27: 0x526125 M=1 + 28: 0x5261e9 M=1 + 29: 0x4683d3 M=1 + 30: 0x467e08 M=1 + 31: 0x53fbf2 M=1 + 32: 0x53f85e M=1 + 33: 0x545f51 M=1 + 34: 0x545a6f M=1 + 35: 0x420fa8 M=1 + 36: 0x414b21 M=1 + 37: 0x414d1f M=1 + 38: 0x4901bd M=1 + 39: 0x422081 M=1 + 40: 0x48dbe2 M=1 + 41: 0x48d15b M=1 + 42: 0x48cdcf M=1 + 43: 0x4a9dbf M=1 + 44: 0x545bfd M=1 + 45: 0x543ac6 M=1 + 46: 0x41dd67 M=1 + 47: 0x41dcbc M=1 + 48: 0x42914f M=1 + 49: 0x429adc M=1 + 50: 0x42e012 M=1 + 51: 0x4307e1 M=1 + 52: 0x4366fe M=1 + 53: 0x42c1c1 M=1 + 54: 0x653e4c M=1 + 55: 0x64bdc4 M=1 + 56: 0x64c358 M=1 + 57: 0x65a73c M=1 + 58: 0x64cdb0 M=1 + 59: 0x64be72 M=1 + 60: 0x64c599 M=1 + 61: 0x64c204 M=1 + 62: 0x64b777 M=1 + 63: 0x5cd55b M=1 + 64: 0x45dbc2 M=1 + 65: 0x543e6f M=1 + 66: 0x559165 M=1 + 67: 0x55ba53 M=1 + 68: 0x559690 M=1 + 69: 0x559984 M=1 + 70: 0x5a19fe M=1 + 71: 0x419e18 M=1 + 72: 0x41a1a7 M=1 + 73: 0x419f62 M=1 + 74: 0x48f09e M=1 + 75: 0x48d990 M=1 + 76: 0x424f34 M=1 + 77: 0x4255d0 M=1 + 78: 0x6fc292 M=1 + 79: 0x6f9c87 M=1 + 80: 0x6f9943 M=1 + 81: 0x6f96bd M=1 + 82: 0x6f966a M=1 + 83: 0x59f399 M=1 + 84: 0x468317 M=1 + 85: 0x6d71a2 M=1 + 86: 0x6da87c M=1 + 87: 0x7b2c3a M=1 + 88: 0x422047 M=1 + 89: 0x40b516 M=1 + 90: 0x40b745 M=1 + 91: 0x6d9ca1 M=1 + 92: 0x4761c4 M=1 + 93: 0x475ea6 M=1 + 94: 0x46fc4e M=1 + 95: 0x46f17f M=1 + 96: 0x46ef32 M=1 + 97: 0x4ab820 M=1 + 98: 0x4acc31 M=1 + 99: 0x4ac7b6 M=1 + 100: 0x4ace35 M=1 +Mappings +1: 0x0/0xffffffffffffffff/0x0 diff --git a/src/profile/testdata/java.contention b/src/profile/testdata/java.contention new file mode 100644 index 00000000..fb484b70 --- /dev/null +++ b/src/profile/testdata/java.contention @@ -0,0 +1,43 @@ +--- contentionz 1 --- +format = java +resolution = microseconds +sampling period = 100 +ms since reset = 6019923 + 1 1 @ 0x00000003 0x00000004 + 14 1 @ 0x0000000d 0x0000000e 0x0000000f 0x00000010 0x00000011 0x00000012 0x00000013 0x00000014 0x00000017 0x00000018 0x00000019 0x0000001a 0x0000001b 0x0000001c 0x00000014 0x00000029 0x0000002a 0x0000002b 0x0000002c 0x0000002d 0x0000002e 0x0000002f 0x00000030 0x00000031 0x00000032 0x00000033 0x00000034 0x00000035 + 2 2 @ 0x00000003 0x00000004 + 2 3 @ 0x00000036 0x00000037 0x00000038 + + + 0x0000003 com.example.function03 (source.java:03) + 0x0000004 com.example.function04 (source.java:04) + 0x000000d com.example.function0d (source.java:0) + 0x000000e com.example.function0e (source.java:0) + 0x000000f com.example.function0f (source.java:0) + 0x0000010 com.example.function10 (source.java:10) + 0x0000011 com.example.function11 (source.java:11) + 0x0000012 com.example.function12 (source.java:12) + 0x0000013 com.example.function13 (source.java:13) + 0x0000014 com.example.function14 (source.java:14) + 0x0000017 com.example.function17 (source.java:17) + 0x0000018 com.example.function18 (source.java:18) + 0x0000019 com.example.function19 (source.java:19) + 0x000001a com.example.function1a (source.java:1) + 0x000001b com.example.function1b (source.java:1) + 0x000001c com.example.function1c (source.java:1) + 0x0000029 com.example.function29 (source.java:29) + 0x000002a com.example.function2a (source.java:2) + 0x000002b com.example.function2b (source.java:2) + 0x000002c com.example.function2c (source.java:2) + 0x000002d com.example.function2d (source.java:2) + 
0x000002e com.example.function2e (source.java:2) + 0x000002f com.example.function2f (source.java:2) + 0x0000030 com.example.function30 (source.java:30) + 0x0000031 com.example.function31 (source.java:31) + 0x0000032 com.example.function32 (source.java:32) + 0x0000033 com.example.function33 (source.java:33) + 0x0000034 com.example.function34 (source.java:34) + 0x0000035 com.example.function35 (source.java:35) + 0x0000036 com.example.function36 (source.java:36) + 0x0000037 com.example.function37 (source.java:37) + 0x0000038 com.example.function38 (source.java:38) diff --git a/src/profile/testdata/java.contention.string b/src/profile/testdata/java.contention.string new file mode 100644 index 00000000..15870140 --- /dev/null +++ b/src/profile/testdata/java.contention.string @@ -0,0 +1,43 @@ +PeriodType: contentions count +Period: 100 +Duration: 1h40 +Samples: +contentions/microseconds delay/microseconds + 100 100: 1 2 + 100 1400: 3 4 5 6 7 8 9 10 11 12 13 14 15 16 10 17 18 19 20 21 22 23 24 25 26 27 28 29 + 200 200: 1 2 + 300 200: 30 31 32 +Locations + 1: 0x0 com.example.function03 source.java:3 s=0 + 2: 0x0 com.example.function04 source.java:4 s=0 + 3: 0x0 com.example.function0d source.java:0 s=0 + 4: 0x0 com.example.function0e source.java:0 s=0 + 5: 0x0 com.example.function0f source.java:0 s=0 + 6: 0x0 com.example.function10 source.java:10 s=0 + 7: 0x0 com.example.function11 source.java:11 s=0 + 8: 0x0 com.example.function12 source.java:12 s=0 + 9: 0x0 com.example.function13 source.java:13 s=0 + 10: 0x0 com.example.function14 source.java:14 s=0 + 11: 0x0 com.example.function17 source.java:17 s=0 + 12: 0x0 com.example.function18 source.java:18 s=0 + 13: 0x0 com.example.function19 source.java:19 s=0 + 14: 0x0 com.example.function1a source.java:1 s=0 + 15: 0x0 com.example.function1b source.java:1 s=0 + 16: 0x0 com.example.function1c source.java:1 s=0 + 17: 0x0 com.example.function29 source.java:29 s=0 + 18: 0x0 com.example.function2a source.java:2 s=0 + 19: 0x0 com.example.function2b source.java:2 s=0 + 20: 0x0 com.example.function2c source.java:2 s=0 + 21: 0x0 com.example.function2d source.java:2 s=0 + 22: 0x0 com.example.function2e source.java:2 s=0 + 23: 0x0 com.example.function2f source.java:2 s=0 + 24: 0x0 com.example.function30 source.java:30 s=0 + 25: 0x0 com.example.function31 source.java:31 s=0 + 26: 0x0 com.example.function32 source.java:32 s=0 + 27: 0x0 com.example.function33 source.java:33 s=0 + 28: 0x0 com.example.function34 source.java:34 s=0 + 29: 0x0 com.example.function35 source.java:35 s=0 + 30: 0x0 com.example.function36 source.java:36 s=0 + 31: 0x0 com.example.function37 source.java:37 s=0 + 32: 0x0 com.example.function38 source.java:38 s=0 +Mappings diff --git a/src/profile/testdata/java.cpu b/src/profile/testdata/java.cpu new file mode 100644 index 00000000..593588b7 Binary files /dev/null and b/src/profile/testdata/java.cpu differ diff --git a/src/profile/testdata/java.cpu.string b/src/profile/testdata/java.cpu.string new file mode 100644 index 00000000..f728cf26 --- /dev/null +++ b/src/profile/testdata/java.cpu.string @@ -0,0 +1,78 @@ +PeriodType: cpu nanoseconds +Period: 10000000 +Samples: +samples/count cpu/nanoseconds + 0 0: 1 + 0 0: 2 + 2 20000000: 3 + 1 10000000: 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 + 1 10000000: 19 20 21 22 23 16 17 18 + 1 10000000: 24 25 26 27 28 29 30 31 32 + 1 10000000: 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 29 30 31 32 + 1 10000000: 54 55 56 57 58 59 60 61 62 11 63 64 16 17 18 +Locations + 1: 0x0 GC :0 s=0 + 2: 0x0 
Compile :0 s=0 + 3: 0x0 VM :0 s=0 + 4: 0x0 com.example.function06 source.java:6 s=0 + 5: 0x0 com.example.function07 source.java:7 s=0 + 6: 0x0 com.example.function08 source.java:8 s=0 + 7: 0x0 com.example.function09 source.java:9 s=0 + 8: 0x0 com.example.function0a source.java:0 s=0 + 9: 0x0 com.example.function0b source.java:0 s=0 + 10: 0x0 com.example.function0c source.java:0 s=0 + 11: 0x0 com.example.function0d source.java:0 s=0 + 12: 0x0 com.example.function0e source.java:0 s=0 + 13: 0x0 com.example.function0f source.java:0 s=0 + 14: 0x0 com.example.function10 source.java:10 s=0 + 15: 0x0 com.example.function11 source.java:11 s=0 + 16: 0x0 com.example.function12 source.java:12 s=0 + 17: 0x0 com.example.function13 source.java:13 s=0 + 18: 0x0 com.example.function14 source.java:14 s=0 + 19: 0x0 com.example.function1d source.java:1 s=0 + 20: 0x0 com.example.function1e source.java:1 s=0 + 21: 0x0 com.example.function1f source.java:1 s=0 + 22: 0x0 com.example.function20 source.java:20 s=0 + 23: 0x0 com.example.function21 source.java:21 s=0 + 24: 0x0 com.example.function22 source.java:22 s=0 + 25: 0x0 com.example.function23 source.java:23 s=0 + 26: 0x0 com.example.function24 source.java:24 s=0 + 27: 0x0 com.example.function25 source.java:25 s=0 + 28: 0x0 com.example.function26 source.java:26 s=0 + 29: 0x0 com.example.function27 source.java:27 s=0 + 30: 0x0 com.example.function28 source.java:28 s=0 + 31: 0x0 com.example.function29 source.java:29 s=0 + 32: 0x0 com.example.function2a source.java:2 s=0 + 33: 0x0 com.example.function2b source.java:2 s=0 + 34: 0x0 com.example.function2c source.java:2 s=0 + 35: 0x0 com.example.function2d source.java:2 s=0 + 36: 0x0 com.example.function2e source.java:2 s=0 + 37: 0x0 com.example.function2f source.java:2 s=0 + 38: 0x0 com.example.function30 source.java:30 s=0 + 39: 0x0 com.example.function31 source.java:31 s=0 + 40: 0x0 com.example.function32 source.java:32 s=0 + 41: 0x0 com.example.function33 source.java:33 s=0 + 42: 0x0 com.example.function34 source.java:34 s=0 + 43: 0x0 com.example.function35 source.java:35 s=0 + 44: 0x0 com.example.function36 source.java:36 s=0 + 45: 0x0 com.example.function37 source.java:37 s=0 + 46: 0x0 com.example.function38 source.java:38 s=0 + 47: 0x0 com.example.function39 source.java:39 s=0 + 48: 0x0 com.example.function3a source.java:3 s=0 + 49: 0x0 com.example.function3b source.java:3 s=0 + 50: 0x0 com.example.function3c source.java:3 s=0 + 51: 0x0 com.example.function3d source.java:3 s=0 + 52: 0x0 com.example.function3e source.java:3 s=0 + 53: 0x0 com.example.function3f source.java:3 s=0 + 54: 0x0 com.example.function40 source.java:40 s=0 + 55: 0x0 com.example.function41 source.java:41 s=0 + 56: 0x0 com.example.function42 source.java:42 s=0 + 57: 0x0 com.example.function43 source.java:43 s=0 + 58: 0x0 com.example.function44 source.java:44 s=0 + 59: 0x0 com.example.function45 source.java:45 s=0 + 60: 0x0 com.example.function46 source.java:46 s=0 + 61: 0x0 com.example.function47 source.java:47 s=0 + 62: 0x0 com.example.function48 source.java:48 s=0 + 63: 0x0 com.example.function49 source.java:49 s=0 + 64: 0x0 com.example.function4a source.java:4 s=0 +Mappings diff --git a/src/profile/testdata/java.heap b/src/profile/testdata/java.heap new file mode 100644 index 00000000..95e4f6e8 --- /dev/null +++ b/src/profile/testdata/java.heap @@ -0,0 +1,133 @@ +--- heapz 1 --- +format = java +resolution = bytes + 7048 1 @ 0x00000003 0x00000004 0x00000005 0x00000006 0x00000007 0x00000008 0x00000009 0x0000000a 0x0000000b 0x0000000c 
0x0000000d 0x0000000e 0x0000000f 0x00000010 0x00000011 0x00000018 0x00000019 0x0000001a 0x0000001b 0x0000001c 0x0000001d 0x0000001e 0x0000001f 0x00000020 0x00000021 0x00000022 0x00000023 0x00000024 0x00000025 0x00000026 0x00000027 0x00000023 0x00000028 0x00000029 0x0000001d 0x0000001e 0x0000001f 0x00000020 0x00000021 0x00000027 0x00000023 0x00000028 0x00000029 0x0000001d 0x0000001e 0x0000001f 0x00000020 0x00000021 0x0000002a 0x00000027 0x00000023 0x00000028 0x00000029 0x0000001d 0x0000001e 0x0000001f 0x00000020 + 4752 9 @ 0x0000002b 0x0000002c 0x0000002d 0x0000002e + 880 1 @ 0x00000035 0x00000036 0x00000037 0x00000038 0x00000039 0x0000003a 0x0000003b 0x00000011 0x0000003d 0x0000003e 0x0000003f 0x00000040 0x00000041 0x00000042 0x00000011 0x00000049 0x0000004a 0x0000004b 0x0000004c 0x0000004d 0x0000004e 0x0000004b 0x0000004f 0x0000004b 0x00000050 0x00000051 0x00000052 0x00000053 0x00000054 0x00000055 0x00000056 0x00000057 + 560 1 @ 0x00000035 0x00000036 0x00000037 0x00000038 0x00000039 0x0000003a 0x0000003b 0x00000011 0x0000003d 0x0000003e 0x0000003f 0x00000040 0x00000041 0x00000042 0x00000011 0x0000005e 0x0000005f 0x00000060 0x00000061 0x00000062 0x00000063 0x00000064 0x00000065 0x00000066 0x00000067 0x00000068 0x00000069 0x0000006a 0x0000006b 0x0000006c 0x0000006d 0x0000006e 0x0000006f 0x00000070 0x00000071 0x00000072 0x00000073 0x00000074 0x00000075 0x00000067 0x00000068 + 528 1 @ 0x00000076 0x00000077 0x00000078 0x00000079 0x0000007a 0x0000007b 0x00000011 0x00000081 0x00000011 0x00000082 0x0000004e 0x0000004b 0x0000004f 0x0000004b 0x00000050 0x00000051 0x00000052 0x00000053 0x00000054 0x00000055 0x00000056 0x00000057 + 440 1 @ 0x00000083 0x00000084 0x00000085 0x00000086 0x00000087 0x00000088 0x00000089 0x0000008a 0x0000008b 0x0000008c 0x0000008d 0x0000008e 0x0000008f 0x00000090 0x00000091 0x00000092 0x00000093 0x00000094 0x00000095 0x00000096 + 240 5 @ 0x00000097 + + + 0x00000003 com.example.function003 (Source003.java:103) + 0x00000004 com.example.function004 (Source004.java:104) + 0x00000005 com.example.function005 (Source005.java:105) + 0x00000006 com.example.function006 (Source006.java:106) + 0x00000007 com.example.function007 (Source007.java:107) + 0x00000008 com.example.function008 (Source008.java:108) + 0x00000009 com.example.function009 (Source009.java:109) + 0x0000000a com.example.function00a (Source00a.java:10) + 0x0000000b com.example.function00b (Source00b.java:10) + 0x0000000c com.example.function00c (Source00c.java:10) + 0x0000000d com.example.function00d (Source00d.java:10) + 0x0000000e com.example.function00e (Source00e.java:10) + 0x0000000f com.example.function00f (Source00f.java:10) + 0x00000010 com.example.function010 (Source010.java:110) + 0x00000011 com.example.function011 (Source011.java:111) + 0x00000018 com.example.function018 (Source018.java:118) + 0x00000019 com.example.function019 (Source019.java:119) + 0x0000001a com.example.function01a (Source01a.java:11) + 0x0000001b com.example.function01b (Source01b.java:11) + 0x0000001c com.example.function01c (Source01c.java:11) + 0x0000001d com.example.function01d (Source01d.java:11) + 0x0000001e com.example.function01e (Source01e.java:11) + 0x0000001f com.example.function01f (Source01f.java:11) + 0x00000020 com.example.function020 (Source020.java:120) + 0x00000021 com.example.function021 (Source021.java:121) + 0x00000022 com.example.function022 (Source022.java:122) + 0x00000023 com.example.function023 (Source023.java:123) + 0x00000024 com.example.function024 (Source024.java:124) + 0x00000025 com.example.function025 
(Source025.java:125) + 0x00000026 com.example.function026 (Source026.java:126) + 0x00000027 com.example.function027 (Source027.java:127) + 0x00000028 com.example.function028 (Source028.java:128) + 0x00000029 com.example.function029 (Source029.java:129) + 0x0000002a com.example.function02a (Source02a.java:12) + 0x0000002b com.example.function02b (Source02b.java:12) + 0x0000002c com.example.function02c (Source02c.java:12) + 0x0000002d com.example.function02d (Source02d.java:12) + 0x0000002e com.example.function02e (Source02e.java:12) + 0x00000035 com.example.function035 (Source035.java:135) + 0x00000036 com.example.function036 (Source036.java:136) + 0x00000037 com.example.function037 (Source037.java:137) + 0x00000038 com.example.function038 (Source038.java:138) + 0x00000039 com.example.function039 (Source039.java:139) + 0x0000003a com.example.function03a (Source03a.java:13) + 0x0000003b com.example.function03b (Source03b.java:13) + 0x0000003d com.example.function03d (Source03d.java:13) + 0x0000003e com.example.function03e (Source03e.java:13) + 0x0000003f com.example.function03f (Source03f.java:13) + 0x00000040 com.example.function040 (Source040.java:140) + 0x00000041 com.example.function041 (Source041.java:141) + 0x00000042 com.example.function042 (Source042.java:142) + 0x00000049 com.example.function049 (Source049.java:149) + 0x0000004a com.example.function04a (Source04a.java:14) + 0x0000004b com.example.function04b (Source04b.java:14) + 0x0000004c com.example.function04c (Source04c.java:14) + 0x0000004d com.example.function04d (Source04d.java:14) + 0x0000004e com.example.function04e (Source04e.java:14) + 0x0000004f com.example.function04f (Source04f.java:14) + 0x00000050 com.example.function050 (Source050.java:150) + 0x00000051 com.example.function051 (Source051.java:151) + 0x00000052 com.example.function052 (Source052.java:152) + 0x00000053 com.example.function053 (Source053.java:153) + 0x00000054 com.example.function054 (Source054.java:154) + 0x00000055 com.example.function055 (Source055.java:155) + 0x00000056 com.example.function056 (Source056.java:156) + 0x00000057 com.example.function057 (Source057.java:157) + 0x0000005a com.example.function05a (Source05a.java:15) + 0x0000005e com.example.function05e (Source05e.java:15) + 0x0000005f com.example.function05f (Source05f.java:15) + 0x00000060 com.example.function060 (Source060.java:160) + 0x00000061 com.example.function061 (Source061.java:161) + 0x00000062 com.example.function062 (Source062.java:162) + 0x00000063 com.example.function063 (Source063.java:163) + 0x00000064 com.example.function064 (Source064.java:164) + 0x00000065 com.example.function065 (Source065.java:165) + 0x00000066 com.example.function066 (Source066.java:166) + 0x00000067 com.example.function067 (Source067.java:167) + 0x00000068 com.example.function068 (Source068.java:168) + 0x00000069 com.example.function069 (Source069.java:169) + 0x0000006a com.example.function06a (Source06a.java:16) + 0x0000006b com.example.function06b (Source06b.java:16) + 0x0000006c com.example.function06c (Source06c.java:16) + 0x0000006d com.example.function06d (Source06d.java:16) + 0x0000006e com.example.function06e (Source06e.java:16) + 0x0000006f com.example.function06f (Source06f.java:16) + 0x00000070 com.example.function070 (Source070.java:170) + 0x00000071 com.example.function071 (Source071.java:171) + 0x00000072 com.example.function072 (Source072.java:172) + 0x00000073 com.example.function073 (Source073.java:173) + 0x00000074 com.example.function074 (Source074.java:174) + 0x00000075 
com.example.function075 (Source075.java:175) + 0x00000076 com.example.function076 (Source076.java:176) + 0x00000077 com.example.function077 (Source077.java:177) + 0x00000078 com.example.function078 (Source078.java:178) + 0x00000079 com.example.function079 (Source079.java:179) + 0x0000007a com.example.function07a (Source07a.java:17) + 0x0000007b com.example.function07b (Source07b.java:17) + 0x0000007d com.example.function07d (Source07d.java:17) + 0x00000081 com.example.function081 (Source081.java:181) + 0x00000082 com.example.function082 (Source082.java:182) + 0x00000083 com.example.function083 (Source083.java:183) + 0x00000084 com.example.function084 (Source084.java:184) + 0x00000085 com.example.function085 (Source085.java:185) + 0x00000086 com.example.function086 (Source086.java:186) + 0x00000087 com.example.function087 (Source087.java:187) + 0x00000088 com.example.function088 (Source088.java:188) + 0x00000089 com.example.function089 (Source089.java:189) + 0x0000008a com.example.function08a (Source08a.java:18) + 0x0000008b com.example.function08b (Source08b.java:18) + 0x0000008c com.example.function08c (Source08c.java:18) + 0x0000008d com.example.function08d (Source08d.java:18) + 0x0000008e com.example.function08e (Source08e.java:18) + 0x0000008f com.example.function08f (Source08f.java:18) + 0x00000090 com.example.function090 (Source090.java:190) + 0x00000091 com.example.function091 (Source091.java:191) + 0x00000092 com.example.function092 (Source092.java:192) + 0x00000093 com.example.function093 (Source093.java:193) + 0x00000094 com.example.function094 (Source094.java:194) + 0x00000095 com.example.function095 (Source095.java:195) + 0x00000096 com.example.function096 (Source096.java:196) + 0x00000097 com.example.function097 (Source097.java:197) diff --git a/src/profile/testdata/java.heap.string b/src/profile/testdata/java.heap.string new file mode 100644 index 00000000..50d44105 --- /dev/null +++ b/src/profile/testdata/java.heap.string @@ -0,0 +1,139 @@ +PeriodType: +Period: 0 +Samples: +inuse_objects/count inuse_space/bytes + 74 527819: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 27 32 33 21 22 23 24 25 31 27 32 33 21 22 23 24 25 34 31 27 32 33 21 22 23 24 + bytes:[7048] + 8941 4720968: 35 36 37 38 + bytes:[528] + 596 524728: 39 40 41 42 43 44 45 15 46 47 48 49 50 51 15 52 53 54 55 56 57 54 58 54 59 60 61 62 63 64 65 66 + bytes:[880] + 936 524568: 39 40 41 42 43 44 45 15 46 47 48 49 50 51 15 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 76 77 + bytes:[560] + 993 524552: 91 92 93 94 95 96 15 97 15 98 57 54 58 54 59 60 61 62 63 64 65 66 + bytes:[528] + 1192 524508: 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 + bytes:[440] + 54615 2621560: 119 + bytes:[48] +Locations + 1: 0x0 com.example.function003 Source003.java:103 s=0 + 2: 0x0 com.example.function004 Source004.java:104 s=0 + 3: 0x0 com.example.function005 Source005.java:105 s=0 + 4: 0x0 com.example.function006 Source006.java:106 s=0 + 5: 0x0 com.example.function007 Source007.java:107 s=0 + 6: 0x0 com.example.function008 Source008.java:108 s=0 + 7: 0x0 com.example.function009 Source009.java:109 s=0 + 8: 0x0 com.example.function00a Source00a.java:10 s=0 + 9: 0x0 com.example.function00b Source00b.java:10 s=0 + 10: 0x0 com.example.function00c Source00c.java:10 s=0 + 11: 0x0 com.example.function00d Source00d.java:10 s=0 + 12: 0x0 com.example.function00e Source00e.java:10 s=0 + 13: 0x0 com.example.function00f Source00f.java:10 s=0 + 14: 0x0 
com.example.function010 Source010.java:110 s=0 + 15: 0x0 com.example.function011 Source011.java:111 s=0 + 16: 0x0 com.example.function018 Source018.java:118 s=0 + 17: 0x0 com.example.function019 Source019.java:119 s=0 + 18: 0x0 com.example.function01a Source01a.java:11 s=0 + 19: 0x0 com.example.function01b Source01b.java:11 s=0 + 20: 0x0 com.example.function01c Source01c.java:11 s=0 + 21: 0x0 com.example.function01d Source01d.java:11 s=0 + 22: 0x0 com.example.function01e Source01e.java:11 s=0 + 23: 0x0 com.example.function01f Source01f.java:11 s=0 + 24: 0x0 com.example.function020 Source020.java:120 s=0 + 25: 0x0 com.example.function021 Source021.java:121 s=0 + 26: 0x0 com.example.function022 Source022.java:122 s=0 + 27: 0x0 com.example.function023 Source023.java:123 s=0 + 28: 0x0 com.example.function024 Source024.java:124 s=0 + 29: 0x0 com.example.function025 Source025.java:125 s=0 + 30: 0x0 com.example.function026 Source026.java:126 s=0 + 31: 0x0 com.example.function027 Source027.java:127 s=0 + 32: 0x0 com.example.function028 Source028.java:128 s=0 + 33: 0x0 com.example.function029 Source029.java:129 s=0 + 34: 0x0 com.example.function02a Source02a.java:12 s=0 + 35: 0x0 com.example.function02b Source02b.java:12 s=0 + 36: 0x0 com.example.function02c Source02c.java:12 s=0 + 37: 0x0 com.example.function02d Source02d.java:12 s=0 + 38: 0x0 com.example.function02e Source02e.java:12 s=0 + 39: 0x0 com.example.function035 Source035.java:135 s=0 + 40: 0x0 com.example.function036 Source036.java:136 s=0 + 41: 0x0 com.example.function037 Source037.java:137 s=0 + 42: 0x0 com.example.function038 Source038.java:138 s=0 + 43: 0x0 com.example.function039 Source039.java:139 s=0 + 44: 0x0 com.example.function03a Source03a.java:13 s=0 + 45: 0x0 com.example.function03b Source03b.java:13 s=0 + 46: 0x0 com.example.function03d Source03d.java:13 s=0 + 47: 0x0 com.example.function03e Source03e.java:13 s=0 + 48: 0x0 com.example.function03f Source03f.java:13 s=0 + 49: 0x0 com.example.function040 Source040.java:140 s=0 + 50: 0x0 com.example.function041 Source041.java:141 s=0 + 51: 0x0 com.example.function042 Source042.java:142 s=0 + 52: 0x0 com.example.function049 Source049.java:149 s=0 + 53: 0x0 com.example.function04a Source04a.java:14 s=0 + 54: 0x0 com.example.function04b Source04b.java:14 s=0 + 55: 0x0 com.example.function04c Source04c.java:14 s=0 + 56: 0x0 com.example.function04d Source04d.java:14 s=0 + 57: 0x0 com.example.function04e Source04e.java:14 s=0 + 58: 0x0 com.example.function04f Source04f.java:14 s=0 + 59: 0x0 com.example.function050 Source050.java:150 s=0 + 60: 0x0 com.example.function051 Source051.java:151 s=0 + 61: 0x0 com.example.function052 Source052.java:152 s=0 + 62: 0x0 com.example.function053 Source053.java:153 s=0 + 63: 0x0 com.example.function054 Source054.java:154 s=0 + 64: 0x0 com.example.function055 Source055.java:155 s=0 + 65: 0x0 com.example.function056 Source056.java:156 s=0 + 66: 0x0 com.example.function057 Source057.java:157 s=0 + 67: 0x0 com.example.function05e Source05e.java:15 s=0 + 68: 0x0 com.example.function05f Source05f.java:15 s=0 + 69: 0x0 com.example.function060 Source060.java:160 s=0 + 70: 0x0 com.example.function061 Source061.java:161 s=0 + 71: 0x0 com.example.function062 Source062.java:162 s=0 + 72: 0x0 com.example.function063 Source063.java:163 s=0 + 73: 0x0 com.example.function064 Source064.java:164 s=0 + 74: 0x0 com.example.function065 Source065.java:165 s=0 + 75: 0x0 com.example.function066 Source066.java:166 s=0 + 76: 0x0 com.example.function067 Source067.java:167 
s=0 + 77: 0x0 com.example.function068 Source068.java:168 s=0 + 78: 0x0 com.example.function069 Source069.java:169 s=0 + 79: 0x0 com.example.function06a Source06a.java:16 s=0 + 80: 0x0 com.example.function06b Source06b.java:16 s=0 + 81: 0x0 com.example.function06c Source06c.java:16 s=0 + 82: 0x0 com.example.function06d Source06d.java:16 s=0 + 83: 0x0 com.example.function06e Source06e.java:16 s=0 + 84: 0x0 com.example.function06f Source06f.java:16 s=0 + 85: 0x0 com.example.function070 Source070.java:170 s=0 + 86: 0x0 com.example.function071 Source071.java:171 s=0 + 87: 0x0 com.example.function072 Source072.java:172 s=0 + 88: 0x0 com.example.function073 Source073.java:173 s=0 + 89: 0x0 com.example.function074 Source074.java:174 s=0 + 90: 0x0 com.example.function075 Source075.java:175 s=0 + 91: 0x0 com.example.function076 Source076.java:176 s=0 + 92: 0x0 com.example.function077 Source077.java:177 s=0 + 93: 0x0 com.example.function078 Source078.java:178 s=0 + 94: 0x0 com.example.function079 Source079.java:179 s=0 + 95: 0x0 com.example.function07a Source07a.java:17 s=0 + 96: 0x0 com.example.function07b Source07b.java:17 s=0 + 97: 0x0 com.example.function081 Source081.java:181 s=0 + 98: 0x0 com.example.function082 Source082.java:182 s=0 + 99: 0x0 com.example.function083 Source083.java:183 s=0 + 100: 0x0 com.example.function084 Source084.java:184 s=0 + 101: 0x0 com.example.function085 Source085.java:185 s=0 + 102: 0x0 com.example.function086 Source086.java:186 s=0 + 103: 0x0 com.example.function087 Source087.java:187 s=0 + 104: 0x0 com.example.function088 Source088.java:188 s=0 + 105: 0x0 com.example.function089 Source089.java:189 s=0 + 106: 0x0 com.example.function08a Source08a.java:18 s=0 + 107: 0x0 com.example.function08b Source08b.java:18 s=0 + 108: 0x0 com.example.function08c Source08c.java:18 s=0 + 109: 0x0 com.example.function08d Source08d.java:18 s=0 + 110: 0x0 com.example.function08e Source08e.java:18 s=0 + 111: 0x0 com.example.function08f Source08f.java:18 s=0 + 112: 0x0 com.example.function090 Source090.java:190 s=0 + 113: 0x0 com.example.function091 Source091.java:191 s=0 + 114: 0x0 com.example.function092 Source092.java:192 s=0 + 115: 0x0 com.example.function093 Source093.java:193 s=0 + 116: 0x0 com.example.function094 Source094.java:194 s=0 + 117: 0x0 com.example.function095 Source095.java:195 s=0 + 118: 0x0 com.example.function096 Source096.java:196 s=0 + 119: 0x0 com.example.function097 Source097.java:197 s=0 +Mappings diff --git a/src/proto/profile.proto b/src/proto/profile.proto new file mode 100644 index 00000000..c1996820 --- /dev/null +++ b/src/proto/profile.proto @@ -0,0 +1,180 @@ +// Profile is a common stacktrace profile format. +// +// Measurements represented with this format should follow the +// following conventions: +// +// - Consumers should treat unset optional fields as if they had been +// set with their default value. +// +// - When possible, measurements should be stored in "unsampled" form +// that is most useful to humans. There should be enough +// information present to determine the original sampled values. +// +// - On-disk, the serialized proto must be gzip-compressed. +// +// - The profile is represented as a set of samples, where each sample +// references a sequence of locations, and where each location belongs +// to a mapping. +// - There is a N->1 relationship from sample.location_id entries to +// locations. For every sample.location_id entry there must be a +// unique Location with that id. 
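To make these id relationships concrete, the sketch below resolves the function name of a sample's leaf frame, following the conventions spelled out in the messages that follow (location_id[0] is the leaf; ids resolve to Location and Function entries; name fields are indices into string_table). The Go structs are hand-written mirrors for illustration only; they are not the repository's profile package, and real code would use generated or hand-rolled bindings.

    // Minimal mirrors of the profile.proto messages, for illustration only.
    type line struct{ FunctionID uint64 }
    type location struct {
        ID   uint64
        Line []line
    }
    type function struct {
        ID   uint64
        Name int64 // index into StringTable
    }
    type sample struct{ LocationID []uint64 }
    type profile struct {
        Sample      []sample
        Location    []location
        Function    []function
        StringTable []string
    }

    // leafFunctionName follows the conventions above: location_id[0] is the
    // leaf, ids resolve to Location/Function entries, and name fields are
    // indices into the string table.
    func leafFunctionName(p *profile) string {
        if len(p.Sample) == 0 || len(p.Sample[0].LocationID) == 0 {
            return ""
        }
        leafID := p.Sample[0].LocationID[0]
        for _, loc := range p.Location {
            if loc.ID != leafID || len(loc.Line) == 0 {
                continue
            }
            for _, fn := range p.Function {
                if fn.ID == loc.Line[0].FunctionID {
                    return p.StringTable[fn.Name]
                }
            }
        }
        return ""
    }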
+// - There is an optional N->1 relationship from locations to +// mappings. For every nonzero Location.mapping_id there must be a +// unique Mapping with that id. + +syntax = "proto3"; + +package perftools.profiles; + +option java_package = "com.google.perftools.profiles"; +option java_outer_classname = "ProfileProto"; + +message Profile { + // A description of the samples associated with each Sample.value. + // For a cpu profile this might be: + // [["cpu","nanoseconds"]] or [["wall","seconds"]] or [["syscall","count"]] + // For a heap profile, this might be: + // [["allocations","count"], ["space","bytes"]], + // If one of the values represents the number of events represented + // by the sample, by convention it should be at index 0 and use + // sample_type.unit == "count". + repeated ValueType sample_type = 1; + // The set of samples recorded in this profile. + repeated Sample sample = 2; + // Mapping from address ranges to the image/binary/library mapped + // into that address range. mapping[0] will be the main binary. + repeated Mapping mapping = 3; + // Locations referenced by samples + repeated Location location = 4; + // Functions referenced by locations + repeated Function function = 5; + // A common table for strings referenced by various messages. + // string_table[0] must always be "". + repeated string string_table = 6 [enforce_utf8 = false]; + // frames with Function.function_name fully matching the following + // regexp will be dropped from the samples, along with their successors. + int64 drop_frames = 7; // Index into string table. + // frames with Function.function_name fully matching the following + // regexp will be kept, even if it matches drop_frames. + int64 keep_frames = 8; // Index into string table. + + // The following fields are informational; they do not affect the + // interpretation of results. + + // Time of collection (UTC) represented as nanoseconds past the epoch. + int64 time_nanos = 9; + // Duration of the profile, if a duration makes sense. + int64 duration_nanos = 10; + // The kind of events between sampled occurrences. + // e.g. [ "cpu","cycles" ] or [ "heap","bytes" ] + ValueType period_type = 11; + // The number of events between sampled occurrences. + int64 period = 12; + // Freeform text associated with the profile. + repeated int64 comment = 13; // Indices into string table. +} + +// ValueType describes the semantics and measurement units of a value. +message ValueType { + int64 type = 1; // Index into string table. + int64 unit = 2; // Index into string table. +} + +// Each Sample records values encountered in some program +// context. The program context is typically a stack trace, perhaps +// augmented with auxiliary information like the thread-id, some +// indicator of a higher-level request being handled, etc. +message Sample { + // The ids recorded here correspond to a Profile.location.id. + // The leaf is at location_id[0]. + repeated uint64 location_id = 1; + // The type and unit of each value are defined by the corresponding + // entry in Profile.sample_type. All samples must have the same + // number of values, equal to the length of Profile.sample_type. + // When aggregating multiple samples into a single sample, the + // result has a list of values that is the element-wise sum of the + // lists of the originals. + repeated int64 value = 2; + // label includes additional context for this sample.
It can include + // things like a thread id, allocation size, etc. + repeated Label label = 3; +} + +message Label { + int64 key = 1; // Index into string table + + // At most one of the following may be present + int64 str = 2; // Index into string table + int64 num = 3; +} + +message Mapping { + // Unique nonzero id for the mapping. + uint64 id = 1; + // Address at which the binary (or DLL) is loaded into memory. + uint64 memory_start = 2; + // The limit of the address range occupied by this mapping. + uint64 memory_limit = 3; + // Offset in the binary that corresponds to the first mapped address. + uint64 file_offset = 4; + // The object this entry is loaded from. This can be a filename on + // disk for the main binary and shared libraries, or virtual + // abstractions like "[vdso]". + int64 filename = 5; // Index into string table + // A string that uniquely identifies a particular program version + // with high probability. E.g., for binaries generated by GNU tools, + // it could be the contents of the .note.gnu.build-id field. + int64 build_id = 6; // Index into string table + + // The following fields indicate the resolution of symbolic info. + bool has_functions = 7; + bool has_filenames = 8; + bool has_line_numbers = 9; + bool has_inline_frames = 10; +} + +// Describes function and line table debug information. +message Location { + // Unique nonzero id for the location. A profile could use + // instruction addresses or any integer sequence as ids. + uint64 id = 1; + // The id of the corresponding profile.Mapping for this location. + // It can be unset if the mapping is unknown or not applicable for + // this profile type. + uint64 mapping_id = 2; + // The instruction address for this location, if available. It + // should be within [Mapping.memory_start...Mapping.memory_limit] + // for the corresponding mapping. A non-leaf address may be in the + // middle of a call instruction. It is up to display tools to find + // the beginning of the instruction if necessary. + uint64 address = 3; + // Multiple Line entries indicate that this location has inlined functions, + // where the last entry represents the caller into which the + // preceding entries were inlined. + // + // E.g., if memcpy() is inlined into printf: + // line[0].function_name == "memcpy" + // line[1].function_name == "printf" + repeated Line line = 4; +} + +message Line { + // The id of the corresponding profile.Function for this line. + uint64 function_id = 1; + // Line number in source code. + int64 line = 2; +} + +message Function { + // Unique nonzero id for the function. + uint64 id = 1; + // Name of the function, in human-readable form if available. + int64 name = 2; // Index into string table + // Name of the function, as identified by the system. + // For instance, it can be a C++ mangled name. + int64 system_name = 3; // Index into string table + // Source file containing the function. + int64 filename = 4; // Index into string table + // Line number in source file. + int64 start_line = 5; +} diff --git a/third_party/src/golang/demangle/LICENSE b/third_party/src/golang/demangle/LICENSE new file mode 100644 index 00000000..d29b3726 --- /dev/null +++ b/third_party/src/golang/demangle/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2015 The Go Authors. All rights reserved.
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/third_party/src/golang/demangle/README.md b/third_party/src/golang/demangle/README.md new file mode 100644 index 00000000..ef3f94a6 --- /dev/null +++ b/third_party/src/golang/demangle/README.md @@ -0,0 +1,3 @@ +# github.com/ianlancetaylor/demangle + +A Go package that can be used to demangle C++ symbol names. diff --git a/third_party/src/golang/demangle/ast.go b/third_party/src/golang/demangle/ast.go new file mode 100644 index 00000000..4881e154 --- /dev/null +++ b/third_party/src/golang/demangle/ast.go @@ -0,0 +1,2642 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package demangle + +import ( + "bytes" + "fmt" + "strings" +) + +// AST is an abstract syntax tree representing a C++ declaration. +// This is sufficient for the demangler but is by no means a general C++ AST. +type AST interface { + // Internal method to convert to demangled string. + print(*printState) + + // Traverse each element of an AST. If the function returns + // false, traversal of children of that element is skipped. + Traverse(func(AST) bool) + + // Copy an AST with possible transformations. + // If the skip function returns true, no copy is required. + // If the copy function returns nil, no copy is required. + // Otherwise the AST returned by copy is used in a copy of the full AST. + // Copy itself returns either a copy or nil. + Copy(copy func(AST) AST, skip func(AST) bool) AST + + // Implement the fmt.GoStringer interface. + GoString() string + goString(indent int, field string) string +} + +// ASTToString returns the demangled name of the AST. +func ASTToString(a AST, options ...Option) string { + tparams := true + for _, o := range options { + switch o { + case NoTemplateParams: + tparams = false + } + } + + ps := printState{tparams: tparams} + a.print(&ps) + return ps.buf.String() +} + +// The printState type holds information needed to print an AST. 
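As a quick illustration of how ASTToString and the node types defined below fit together, a caller can build a small AST by hand and print it. This is only a sketch: in normal use the AST comes from the package's parser, and the import path ("golang/demangle") is an assumption inferred from the third_party GOPATH layout rather than stated in this change.

    package main

    import (
        "fmt"

        "golang/demangle" // assumed import path under the third_party GOPATH
    )

    func main() {
        // A hand-built AST for the template name "vector<int>".
        ast := &demangle.Template{
            Name: &demangle.Name{Name: "vector"},
            Args: []demangle.AST{&demangle.Name{Name: "int"}},
        }
        fmt.Println(demangle.ASTToString(ast))                            // expected: vector<int>
        fmt.Println(demangle.ASTToString(ast, demangle.NoTemplateParams)) // expected: vector
    }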
+type printState struct { + tparams bool // whether to print template parameters + + buf bytes.Buffer + last byte // Last byte written to buffer. + + // The inner field is a list of items to print for a type + // name. This is used by types to implement the inside-out + // C++ declaration syntax. + inner []AST +} + +// writeByte adds a byte to the string being printed. +func (ps *printState) writeByte(b byte) { + ps.last = b + ps.buf.WriteByte(b) +} + +// writeString adds a string to the string being printed. +func (ps *printState) writeString(s string) { + if len(s) > 0 { + ps.last = s[len(s)-1] + } + ps.buf.WriteString(s) +} + +// Name is an unqualified name. +type Name struct { + Name string +} + +func (n *Name) print(ps *printState) { + ps.writeString(n.Name) +} + +func (n *Name) Traverse(fn func(AST) bool) { + fn(n) +} + +func (n *Name) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(n) { + return nil + } + return fn(n) +} + +func (n *Name) GoString() string { + return n.goString(0, "Name: ") +} + +func (n *Name) goString(indent int, field string) string { + return fmt.Sprintf("%*s%s%s", indent, "", field, n.Name) +} + +// Typed is a typed name. +type Typed struct { + Name AST + Type AST +} + +func (t *Typed) print(ps *printState) { + // We are printing a typed name, so ignore the current set of + // inner names to print. Pass down our name as the one to use. + holdInner := ps.inner + defer func() { ps.inner = holdInner }() + + ps.inner = []AST{t} + t.Type.print(ps) + if len(ps.inner) > 0 { + // The type did not print the name; print it now in + // the default location. + ps.writeByte(' ') + t.Name.print(ps) + } +} + +func (t *Typed) printInner(ps *printState) { + t.Name.print(ps) +} + +func (t *Typed) Traverse(fn func(AST) bool) { + if fn(t) { + t.Name.Traverse(fn) + t.Type.Traverse(fn) + } +} + +func (t *Typed) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(t) { + return nil + } + name := t.Name.Copy(fn, skip) + typ := t.Type.Copy(fn, skip) + if name == nil && typ == nil { + return fn(t) + } + if name == nil { + name = t.Name + } + if typ == nil { + typ = t.Type + } + t = &Typed{Name: name, Type: typ} + if r := fn(t); r != nil { + return r + } + return t +} + +func (t *Typed) GoString() string { + return t.goString(0, "") +} + +func (t *Typed) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sTyped:\n%s\n%s", indent, "", field, + t.Name.goString(indent+2, "Name: "), + t.Type.goString(indent+2, "Type: ")) +} + +// Qualified is a name in a scope. +type Qualified struct { + Scope AST + Name AST + + // The LocalName field is true if this is parsed as a + // <local-name>. We shouldn't really need this, but in some + // cases (for the unary sizeof operator) the standard + // demangler prints a local name slightly differently. We + // keep track of this for compatibility.
+ LocalName bool // A full local name encoding +} + +func (q *Qualified) print(ps *printState) { + q.Scope.print(ps) + ps.writeString("::") + q.Name.print(ps) +} + +func (q *Qualified) Traverse(fn func(AST) bool) { + if fn(q) { + q.Scope.Traverse(fn) + q.Name.Traverse(fn) + } +} + +func (q *Qualified) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(q) { + return nil + } + scope := q.Scope.Copy(fn, skip) + name := q.Name.Copy(fn, skip) + if scope == nil && name == nil { + return fn(q) + } + if scope == nil { + scope = q.Scope + } + if name == nil { + name = q.Name + } + q = &Qualified{Scope: scope, Name: name, LocalName: q.LocalName} + if r := fn(q); r != nil { + return r + } + return q +} + +func (q *Qualified) GoString() string { + return q.goString(0, "") +} + +func (q *Qualified) goString(indent int, field string) string { + s := "" + if q.LocalName { + s = " LocalName: true" + } + return fmt.Sprintf("%*s%sQualified:%s\n%s\n%s", indent, "", field, + s, q.Scope.goString(indent+2, "Scope: "), + q.Name.goString(indent+2, "Name: ")) +} + +// Template is a template with arguments. +type Template struct { + Name AST + Args []AST +} + +func (t *Template) print(ps *printState) { + // Inner types apply to the template as a whole, they don't + // cross over into the template. + holdInner := ps.inner + defer func() { ps.inner = holdInner }() + + ps.inner = nil + t.Name.print(ps) + + if !ps.tparams { + // Do not print template parameters. + return + } + // We need an extra space after operator<. + if ps.last == '<' { + ps.writeByte(' ') + } + + ps.writeByte('<') + first := true + for _, a := range t.Args { + if ps.isEmpty(a) { + continue + } + if !first { + ps.writeString(", ") + } + a.print(ps) + first = false + } + if ps.last == '>' { + // Avoid syntactic ambiguity in old versions of C++. + ps.writeByte(' ') + } + ps.writeByte('>') +} + +func (t *Template) Traverse(fn func(AST) bool) { + if fn(t) { + t.Name.Traverse(fn) + for _, a := range t.Args { + a.Traverse(fn) + } + } +} + +func (t *Template) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(t) { + return nil + } + name := t.Name.Copy(fn, skip) + changed := name != nil + args := make([]AST, len(t.Args)) + for i, a := range t.Args { + ac := a.Copy(fn, skip) + if ac == nil { + args[i] = a + } else { + args[i] = ac + changed = true + } + } + if !changed { + return fn(t) + } + if name == nil { + name = t.Name + } + t = &Template{Name: name, Args: args} + if r := fn(t); r != nil { + return r + } + return t +} + +func (t *Template) GoString() string { + return t.goString(0, "") +} + +func (t *Template) goString(indent int, field string) string { + var args string + if len(t.Args) == 0 { + args = fmt.Sprintf("%*sArgs: nil", indent+2, "") + } else { + args = fmt.Sprintf("%*sArgs:", indent+2, "") + for i, a := range t.Args { + args += "\n" + args += a.goString(indent+4, fmt.Sprintf("%d: ", i)) + } + } + return fmt.Sprintf("%*s%sTemplate (%p):\n%s\n%s", indent, "", field, t, + t.Name.goString(indent+2, "Name: "), args) +} + +// TemplateParam is a template parameter. The Template field is +// filled in while parsing the demangled string. We don't normally +// see these while printing--they are replaced by the simplify +// function. 
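The Copy/skip pattern implemented by every node above is what makes substitutions like the simplify step possible. The sketch below is written as if it lived in this package, but it is not part of the change: it rewrites matching Name nodes and shares every untouched subtree.

    // renameAll returns a copy of a in which every *Name equal to from is
    // replaced by a Name equal to to. Subtrees without a match are shared,
    // not copied, and a nil result from Copy means nothing matched at all.
    func renameAll(a AST, from, to string) AST {
        copied := a.Copy(
            func(sub AST) AST {
                if n, ok := sub.(*Name); ok && n.Name == from {
                    return &Name{Name: to}
                }
                return nil // no replacement needed for this node
            },
            func(AST) bool { return false }, // never skip a subtree
        )
        if copied == nil {
            return a
        }
        return copied
    }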
+type TemplateParam struct { + Index int + Template *Template +} + +func (tp *TemplateParam) print(ps *printState) { + if tp.Template == nil { + panic("TemplateParam Template field is nil") + } + if tp.Index >= len(tp.Template.Args) { + panic("TemplateParam Index out of bounds") + } + tp.Template.Args[tp.Index].print(ps) +} + +func (tp *TemplateParam) Traverse(fn func(AST) bool) { + fn(tp) + // Don't traverse Template--it points elsewhere in the AST. +} + +func (tp *TemplateParam) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(tp) { + return nil + } + return fn(tp) +} + +func (tp *TemplateParam) GoString() string { + return tp.goString(0, "") +} + +func (tp *TemplateParam) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sTemplateParam: Template: %p; Index %d", indent, "", field, tp.Template, tp.Index) +} + +// Qualifiers is an ordered list of type qualifiers. +type Qualifiers []string + +// TypeWithQualifiers is a type with standard qualifiers. +type TypeWithQualifiers struct { + Base AST + Qualifiers Qualifiers +} + +func (twq *TypeWithQualifiers) print(ps *printState) { + // Give the base type a chance to print the inner types. + ps.inner = append(ps.inner, twq) + twq.Base.print(ps) + if len(ps.inner) > 0 { + // The qualifier wasn't printed by Base. + ps.writeByte(' ') + ps.writeString(strings.Join(twq.Qualifiers, " ")) + ps.inner = ps.inner[:len(ps.inner)-1] + } +} + +// Print qualifiers as an inner type by just printing the qualifiers. +func (twq *TypeWithQualifiers) printInner(ps *printState) { + ps.writeByte(' ') + ps.writeString(strings.Join(twq.Qualifiers, " ")) +} + +func (twq *TypeWithQualifiers) Traverse(fn func(AST) bool) { + if fn(twq) { + twq.Base.Traverse(fn) + } +} + +func (twq *TypeWithQualifiers) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(twq) { + return nil + } + base := twq.Base.Copy(fn, skip) + if base == nil { + return fn(twq) + } + twq = &TypeWithQualifiers{Base: base, Qualifiers: twq.Qualifiers} + if r := fn(twq); r != nil { + return r + } + return twq +} + +func (twq *TypeWithQualifiers) GoString() string { + return twq.goString(0, "") +} + +func (twq *TypeWithQualifiers) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sTypeWithQualifiers: Qualifiers: %s\n%s", indent, "", field, + twq.Qualifiers, twq.Base.goString(indent+2, "Base: ")) +} + +// MethodWithQualifiers is a method with qualifiers. +type MethodWithQualifiers struct { + Method AST + Qualifiers Qualifiers + RefQualifier string // "" or "&" or "&&" +} + +func (mwq *MethodWithQualifiers) print(ps *printState) { + // Give the base type a chance to print the inner types. 
+ ps.inner = append(ps.inner, mwq) + mwq.Method.print(ps) + if len(ps.inner) > 0 { + if len(mwq.Qualifiers) > 0 { + ps.writeByte(' ') + ps.writeString(strings.Join(mwq.Qualifiers, " ")) + } + if mwq.RefQualifier != "" { + ps.writeByte(' ') + ps.writeString(mwq.RefQualifier) + } + ps.inner = ps.inner[:len(ps.inner)-1] + } +} + +func (mwq *MethodWithQualifiers) printInner(ps *printState) { + if len(mwq.Qualifiers) > 0 { + ps.writeByte(' ') + ps.writeString(strings.Join(mwq.Qualifiers, " ")) + } + if mwq.RefQualifier != "" { + ps.writeByte(' ') + ps.writeString(mwq.RefQualifier) + } +} + +func (mwq *MethodWithQualifiers) Traverse(fn func(AST) bool) { + if fn(mwq) { + mwq.Method.Traverse(fn) + } +} + +func (mwq *MethodWithQualifiers) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(mwq) { + return nil + } + method := mwq.Method.Copy(fn, skip) + if method == nil { + return fn(mwq) + } + mwq = &MethodWithQualifiers{Method: method, Qualifiers: mwq.Qualifiers, RefQualifier: mwq.RefQualifier} + if r := fn(mwq); r != nil { + return r + } + return mwq +} + +func (mwq *MethodWithQualifiers) GoString() string { + return mwq.goString(0, "") +} + +func (mwq *MethodWithQualifiers) goString(indent int, field string) string { + var q string + if len(mwq.Qualifiers) > 0 { + q += fmt.Sprintf(" Qualifiers: %v", mwq.Qualifiers) + } + if mwq.RefQualifier != "" { + if q != "" { + q += ";" + } + q += " RefQualifier: " + mwq.RefQualifier + } + return fmt.Sprintf("%*s%sMethodWithQualifiers:%s\n%s", indent, "", field, + q, mwq.Method.goString(indent+2, "Method: ")) +} + +// BuiltinType is a builtin type, like "int". +type BuiltinType struct { + Name string +} + +func (bt *BuiltinType) print(ps *printState) { + ps.writeString(bt.Name) +} + +func (bt *BuiltinType) Traverse(fn func(AST) bool) { + fn(bt) +} + +func (bt *BuiltinType) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(bt) { + return nil + } + return fn(bt) +} + +func (bt *BuiltinType) GoString() string { + return bt.goString(0, "") +} + +func (bt *BuiltinType) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sBuiltinType: %s", indent, "", field, bt.Name) +} + +// printBase is common print code for types that are printed with a +// simple suffix. +func printBase(ps *printState, qual, base AST) { + ps.inner = append(ps.inner, qual) + base.print(ps) + if len(ps.inner) > 0 { + qual.(innerPrinter).printInner(ps) + ps.inner = ps.inner[:len(ps.inner)-1] + } +} + +// PointerType is a pointer type. +type PointerType struct { + Base AST +} + +func (pt *PointerType) print(ps *printState) { + printBase(ps, pt, pt.Base) +} + +func (pt *PointerType) printInner(ps *printState) { + ps.writeString("*") +} + +func (pt *PointerType) Traverse(fn func(AST) bool) { + if fn(pt) { + pt.Base.Traverse(fn) + } +} + +func (pt *PointerType) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(pt) { + return nil + } + base := pt.Base.Copy(fn, skip) + if base == nil { + return fn(pt) + } + pt = &PointerType{Base: base} + if r := fn(pt); r != nil { + return r + } + return pt +} + +func (pt *PointerType) GoString() string { + return pt.goString(0, "") +} + +func (pt *PointerType) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sPointerType:\n%s", indent, "", field, + pt.Base.goString(indent+2, "")) +} + +// ReferenceType is a reference type. 
+type ReferenceType struct { + Base AST +} + +func (rt *ReferenceType) print(ps *printState) { + printBase(ps, rt, rt.Base) +} + +func (rt *ReferenceType) printInner(ps *printState) { + ps.writeString("&") +} + +func (rt *ReferenceType) Traverse(fn func(AST) bool) { + if fn(rt) { + rt.Base.Traverse(fn) + } +} + +func (rt *ReferenceType) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(rt) { + return nil + } + base := rt.Base.Copy(fn, skip) + if base == nil { + return fn(rt) + } + rt = &ReferenceType{Base: base} + if r := fn(rt); r != nil { + return r + } + return rt +} + +func (rt *ReferenceType) GoString() string { + return rt.goString(0, "") +} + +func (rt *ReferenceType) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sReferenceType:\n%s", indent, "", field, + rt.Base.goString(indent+2, "")) +} + +// RvalueReferenceType is an rvalue reference type. +type RvalueReferenceType struct { + Base AST +} + +func (rt *RvalueReferenceType) print(ps *printState) { + printBase(ps, rt, rt.Base) +} + +func (rt *RvalueReferenceType) printInner(ps *printState) { + ps.writeString("&&") +} + +func (rt *RvalueReferenceType) Traverse(fn func(AST) bool) { + if fn(rt) { + rt.Base.Traverse(fn) + } +} + +func (rt *RvalueReferenceType) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(rt) { + return nil + } + base := rt.Base.Copy(fn, skip) + if base == nil { + return fn(rt) + } + rt = &RvalueReferenceType{Base: base} + if r := fn(rt); r != nil { + return r + } + return rt +} + +func (rt *RvalueReferenceType) GoString() string { + return rt.goString(0, "") +} + +func (rt *RvalueReferenceType) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sRvalueReferenceType:\n%s", indent, "", field, + rt.Base.goString(indent+2, "")) +} + +// ComplexType is a complex type. +type ComplexType struct { + Base AST +} + +func (ct *ComplexType) print(ps *printState) { + printBase(ps, ct, ct.Base) +} + +func (ct *ComplexType) printInner(ps *printState) { + ps.writeString(" _Complex") +} + +func (ct *ComplexType) Traverse(fn func(AST) bool) { + if fn(ct) { + ct.Base.Traverse(fn) + } +} + +func (ct *ComplexType) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(ct) { + return nil + } + base := ct.Base.Copy(fn, skip) + if base == nil { + return fn(ct) + } + ct = &ComplexType{Base: base} + if r := fn(ct); r != nil { + return r + } + return ct +} + +func (ct *ComplexType) GoString() string { + return ct.goString(0, "") +} + +func (ct *ComplexType) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sComplexType:\n%s", indent, "", field, + ct.Base.goString(indent+2, "")) +} + +// ImaginaryType is an imaginary type. 
+type ImaginaryType struct { + Base AST +} + +func (it *ImaginaryType) print(ps *printState) { + printBase(ps, it, it.Base) +} + +func (it *ImaginaryType) printInner(ps *printState) { + ps.writeString(" _Imaginary") +} + +func (it *ImaginaryType) Traverse(fn func(AST) bool) { + if fn(it) { + it.Base.Traverse(fn) + } +} + +func (it *ImaginaryType) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(it) { + return nil + } + base := it.Base.Copy(fn, skip) + if base == nil { + return fn(it) + } + it = &ImaginaryType{Base: base} + if r := fn(it); r != nil { + return r + } + return it +} + +func (it *ImaginaryType) GoString() string { + return it.goString(0, "") +} + +func (it *ImaginaryType) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sImaginaryType:\n%s", indent, "", field, + it.Base.goString(indent+2, "")) +} + +// VendorQualifier is a type qualified by a vendor-specific qualifier. +type VendorQualifier struct { + Qualifier AST + Type AST +} + +func (vq *VendorQualifier) print(ps *printState) { + ps.inner = append(ps.inner, vq) + vq.Type.print(ps) + if len(ps.inner) > 0 { + ps.printOneInner(nil) + } +} + +func (vq *VendorQualifier) printInner(ps *printState) { + ps.writeByte(' ') + vq.Qualifier.print(ps) +} + +func (vq *VendorQualifier) Traverse(fn func(AST) bool) { + if fn(vq) { + vq.Qualifier.Traverse(fn) + vq.Type.Traverse(fn) + } +} + +func (vq *VendorQualifier) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(vq) { + return nil + } + qualifier := vq.Qualifier.Copy(fn, skip) + typ := vq.Type.Copy(fn, skip) + if qualifier == nil && typ == nil { + return fn(vq) + } + if qualifier == nil { + qualifier = vq.Qualifier + } + if typ == nil { + typ = vq.Type + } + vq = &VendorQualifier{Qualifier: qualifier, Type: typ} + if r := fn(vq); r != nil { + return r + } + return vq +} + +func (vq *VendorQualifier) GoString() string { + return vq.goString(0, "") +} + +func (vq *VendorQualifier) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sVendorQualifier:\n%s\n%s", indent, "", field, + vq.Qualifier.goString(indent+2, "Qualifier: "), + vq.Type.goString(indent+2, "Type: ")) +} + +// ArrayType is an array type. +type ArrayType struct { + Dimension AST + Element AST +} + +func (at *ArrayType) print(ps *printState) { + // Pass the array type down as an inner type so that we print + // multi-dimensional arrays correctly. + ps.inner = append(ps.inner, at) + at.Element.print(ps) + if ln := len(ps.inner); ln > 0 { + ps.inner = ps.inner[:ln-1] + at.printDimension(ps) + } +} + +func (at *ArrayType) printInner(ps *printState) { + at.printDimension(ps) +} + +// Print the array dimension. +func (at *ArrayType) printDimension(ps *printState) { + space := " " + for len(ps.inner) > 0 { + // We haven't gotten to the real type yet.
Use + // parentheses around that type, except that if it is + // an array type we print it as a multi-dimensional + // array + in := ps.inner[len(ps.inner)-1] + if twq, ok := in.(*TypeWithQualifiers); ok { + in = twq.Base + } + if _, ok := in.(*ArrayType); ok { + if in == ps.inner[len(ps.inner)-1] { + space = "" + } + ps.printOneInner(nil) + } else { + ps.writeString(" (") + ps.printInner(false) + ps.writeByte(')') + } + } + ps.writeString(space) + ps.writeByte('[') + at.Dimension.print(ps) + ps.writeByte(']') +} + +func (at *ArrayType) Traverse(fn func(AST) bool) { + if fn(at) { + at.Dimension.Traverse(fn) + at.Element.Traverse(fn) + } +} + +func (at *ArrayType) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(at) { + return nil + } + dimension := at.Dimension.Copy(fn, skip) + element := at.Element.Copy(fn, skip) + if dimension == nil && element == nil { + return fn(at) + } + if dimension == nil { + dimension = at.Dimension + } + if element == nil { + element = at.Element + } + at = &ArrayType{Dimension: dimension, Element: element} + if r := fn(at); r != nil { + return r + } + return at +} + +func (at *ArrayType) GoString() string { + return at.goString(0, "") +} + +func (at *ArrayType) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sArrayType:\n%s\n%s", indent, "", field, + at.Dimension.goString(indent+2, "Dimension: "), + at.Element.goString(indent+2, "Element: ")) +} + +// FunctionType is a function type. The Return field may be nil for +// cases where the return type is not part of the mangled name. +type FunctionType struct { + Return AST + Args []AST +} + +func (ft *FunctionType) print(ps *printState) { + if ft.Return != nil { + // Pass the return type as an inner type in order to + // print the arguments in the right location. + ps.inner = append(ps.inner, ft) + ft.Return.print(ps) + if len(ps.inner) == 0 { + // Everything was printed. + return + } + ps.inner = ps.inner[:len(ps.inner)-1] + ps.writeByte(' ') + } + ft.printArgs(ps) +} + +func (ft *FunctionType) printInner(ps *printState) { + ft.printArgs(ps) +} + +// printArgs prints the arguments of a function type. It looks at the +// inner types for spacing. 
+func (ft *FunctionType) printArgs(ps *printState) { + paren := false + space := false + for i := len(ps.inner) - 1; i >= 0; i-- { + switch ps.inner[i].(type) { + case *PointerType, *ReferenceType, *RvalueReferenceType: + paren = true + case *TypeWithQualifiers, *ComplexType, *ImaginaryType, *PtrMem: + space = true + paren = true + } + if paren { + break + } + } + + if paren { + if !space && (ps.last != '(' && ps.last != '*') { + space = true + } + if space && ps.last != ' ' { + ps.writeByte(' ') + } + ps.writeByte('(') + } + + save := ps.printInner(true) + + if paren { + ps.writeByte(')') + } + + ps.writeByte('(') + first := true + for _, a := range ft.Args { + if ps.isEmpty(a) { + continue + } + if !first { + ps.writeString(", ") + } + a.print(ps) + first = false + } + ps.writeByte(')') + + ps.inner = save + ps.printInner(false) +} + +func (ft *FunctionType) Traverse(fn func(AST) bool) { + if fn(ft) { + if ft.Return != nil { + ft.Return.Traverse(fn) + } + for _, a := range ft.Args { + a.Traverse(fn) + } + } +} + +func (ft *FunctionType) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(ft) { + return nil + } + changed := false + var ret AST + if ft.Return != nil { + ret = ft.Return.Copy(fn, skip) + if ret == nil { + ret = ft.Return + } else { + changed = true + } + } + args := make([]AST, len(ft.Args)) + for i, a := range ft.Args { + ac := a.Copy(fn, skip) + if ac == nil { + args[i] = a + } else { + args[i] = ac + changed = true + } + } + if !changed { + return fn(ft) + } + ft = &FunctionType{Return: ret, Args: args} + if r := fn(ft); r != nil { + return r + } + return ft +} + +func (ft *FunctionType) GoString() string { + return ft.goString(0, "") +} + +func (ft *FunctionType) goString(indent int, field string) string { + var r string + if ft.Return == nil { + r = fmt.Sprintf("%*sReturn: nil", indent+2, "") + } else { + r = ft.Return.goString(indent+2, "Return: ") + } + var args string + if len(ft.Args) == 0 { + args = fmt.Sprintf("%*sArgs: nil", indent+2, "") + } else { + args = fmt.Sprintf("%*sArgs:", indent+2, "") + for i, a := range ft.Args { + args += "\n" + args += a.goString(indent+4, fmt.Sprintf("%d: ", i)) + } + } + return fmt.Sprintf("%*s%sFunctionType:\n%s\n%s", indent, "", field, r, args) +} + +// FunctionParam is a parameter of a function, used for last-specified +// return type in a closure. +type FunctionParam struct { + Index int +} + +func (fp *FunctionParam) print(ps *printState) { + if fp.Index == 0 { + ps.writeString("this") + } else { + fmt.Fprintf(&ps.buf, "{parm#%d}", fp.Index) + } +} + +func (fp *FunctionParam) Traverse(fn func(AST) bool) { + fn(fp) +} + +func (fp *FunctionParam) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(fp) { + return nil + } + return fn(fp) +} + +func (fp *FunctionParam) GoString() string { + return fp.goString(0, "") +} + +func (fp *FunctionParam) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sFunctionParam: %d", indent, "", field, fp.Index) +} + +// PtrMem is a pointer-to-member expression. 
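The ps.inner stack threaded through printBase, Typed, PointerType and FunctionType is what produces C++'s inside-out declarator syntax. Continuing the hand-built-AST sketch from the earlier example (the expected output is what the print methods above should produce; it has not been checked against the parser):

    // A variable "p" of type "int*": the pointer is printed as an inner
    // type after the base type, and the name lands at the end.
    p := &demangle.Typed{
        Name: &demangle.Name{Name: "p"},
        Type: &demangle.PointerType{Base: &demangle.BuiltinType{Name: "int"}},
    }
    fmt.Println(demangle.ASTToString(p)) // expected: int* p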
+type PtrMem struct { + Class AST + Member AST +} + +func (pm *PtrMem) print(ps *printState) { + ps.inner = append(ps.inner, pm) + pm.Member.print(ps) + if len(ps.inner) > 0 { + ps.printOneInner(nil) + } +} + +func (pm *PtrMem) printInner(ps *printState) { + if ps.last != '(' { + ps.writeByte(' ') + } + pm.Class.print(ps) + ps.writeString("::*") +} + +func (pm *PtrMem) Traverse(fn func(AST) bool) { + if fn(pm) { + pm.Class.Traverse(fn) + pm.Member.Traverse(fn) + } +} + +func (pm *PtrMem) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(pm) { + return nil + } + class := pm.Class.Copy(fn, skip) + member := pm.Member.Copy(fn, skip) + if class == nil && member == nil { + return fn(pm) + } + if class == nil { + class = pm.Class + } + if member == nil { + member = pm.Member + } + pm = &PtrMem{Class: class, Member: member} + if r := fn(pm); r != nil { + return r + } + return pm +} + +func (pm *PtrMem) GoString() string { + return pm.goString(0, "") +} + +func (pm *PtrMem) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sPtrMem:\n%s\n%s", indent, "", field, + pm.Class.goString(indent+2, "Class: "), + pm.Member.goString(indent+2, "Member: ")) +} + +// FixedType is a fixed numeric type of unknown size. +type FixedType struct { + Base AST + Accum bool + Sat bool +} + +func (ft *FixedType) print(ps *printState) { + if ft.Sat { + ps.writeString("_Sat ") + } + if bt, ok := ft.Base.(*BuiltinType); ok && bt.Name == "int" { + // The standard demangler skips printing "int". + } else { + ft.Base.print(ps) + ps.writeByte(' ') + } + if ft.Accum { + ps.writeString("_Accum") + } else { + ps.writeString("_Fract") + } +} + +func (ft *FixedType) Traverse(fn func(AST) bool) { + if fn(ft) { + ft.Base.Traverse(fn) + } +} + +func (ft *FixedType) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(ft) { + return nil + } + base := ft.Base.Copy(fn, skip) + if base == nil { + return fn(ft) + } + ft = &FixedType{Base: base, Accum: ft.Accum, Sat: ft.Sat} + if r := fn(ft); r != nil { + return r + } + return ft +} + +func (ft *FixedType) GoString() string { + return ft.goString(0, "") +} + +func (ft *FixedType) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sFixedType: Accum: %t; Sat: %t\n%s", indent, "", field, + ft.Accum, ft.Sat, + ft.Base.goString(indent+2, "Base: ")) +} + +// VectorType is a vector type. 
+type VectorType struct { + Dimension AST + Base AST +} + +func (vt *VectorType) print(ps *printState) { + ps.inner = append(ps.inner, vt) + vt.Base.print(ps) + if len(ps.inner) > 0 { + ps.printOneInner(nil) + } +} + +func (vt *VectorType) printInner(ps *printState) { + ps.writeString(" __vector(") + vt.Dimension.print(ps) + ps.writeByte(')') +} + +func (vt *VectorType) Traverse(fn func(AST) bool) { + if fn(vt) { + vt.Dimension.Traverse(fn) + vt.Base.Traverse(fn) + } +} + +func (vt *VectorType) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(vt) { + return nil + } + dimension := vt.Dimension.Copy(fn, skip) + base := vt.Base.Copy(fn, skip) + if dimension == nil && base == nil { + return fn(vt) + } + if dimension == nil { + dimension = vt.Dimension + } + if base == nil { + base = vt.Base + } + vt = &VectorType{Dimension: dimension, Base: base} + if r := fn(vt); r != nil { + return r + } + return vt +} + +func (vt *VectorType) GoString() string { + return vt.goString(0, "") +} + +func (vt *VectorType) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sVectorType:\n%s\n%s", indent, "", field, + vt.Dimension.goString(indent+2, "Dimension: "), + vt.Base.goString(indent+2, "Base: ")) +} + +// Decltype is the decltype operator. +type Decltype struct { + Expr AST +} + +func (dt *Decltype) print(ps *printState) { + ps.writeString("decltype (") + dt.Expr.print(ps) + ps.writeByte(')') +} + +func (dt *Decltype) Traverse(fn func(AST) bool) { + if fn(dt) { + dt.Expr.Traverse(fn) + } +} + +func (dt *Decltype) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(dt) { + return nil + } + expr := dt.Expr.Copy(fn, skip) + if expr == nil { + return fn(dt) + } + dt = &Decltype{Expr: expr} + if r := fn(dt); r != nil { + return r + } + return dt +} + +func (dt *Decltype) GoString() string { + return dt.goString(0, "") +} + +func (dt *Decltype) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sDecltype:\n%s", indent, "", field, + dt.Expr.goString(indent+2, "Expr: ")) +} + +// Operator is an operator. +type Operator struct { + Name string +} + +func (op *Operator) print(ps *printState) { + ps.writeString("operator") + if isLower(op.Name[0]) { + ps.writeByte(' ') + } + n := op.Name + n = strings.TrimSuffix(n, " ") + ps.writeString(n) +} + +func (op *Operator) Traverse(fn func(AST) bool) { + fn(op) +} + +func (op *Operator) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(op) { + return nil + } + return fn(op) +} + +func (op *Operator) GoString() string { + return op.goString(0, "") +} + +func (op *Operator) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sOperator: %s", indent, "", field, op.Name) +} + +// Constructor is a constructor. +type Constructor struct { + Name AST +} + +func (c *Constructor) print(ps *printState) { + c.Name.print(ps) +} + +func (c *Constructor) Traverse(fn func(AST) bool) { + if fn(c) { + c.Name.Traverse(fn) + } +} + +func (c *Constructor) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(c) { + return nil + } + name := c.Name.Copy(fn, skip) + if name == nil { + return fn(c) + } + c = &Constructor{Name: name} + if r := fn(c); r != nil { + return r + } + return c +} + +func (c *Constructor) GoString() string { + return c.goString(0, "") +} + +func (c *Constructor) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sConstructor:\n%s", indent, "", field, c.Name.goString(indent+2, "Name: ")) +} + +// Destructor is a destructor. 
+type Destructor struct { + Name AST +} + +func (d *Destructor) print(ps *printState) { + ps.writeByte('~') + d.Name.print(ps) +} + +func (d *Destructor) Traverse(fn func(AST) bool) { + if fn(d) { + d.Name.Traverse(fn) + } +} + +func (d *Destructor) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(d) { + return nil + } + name := d.Name.Copy(fn, skip) + if name == nil { + return fn(d) + } + d = &Destructor{Name: name} + if r := fn(d); r != nil { + return r + } + return d +} + +func (d *Destructor) GoString() string { + return d.goString(0, "") +} + +func (d *Destructor) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sDestructor:\n%s", indent, "", field, d.Name.goString(indent+2, "Name: ")) +} + +// GlobalCDtor is a global constructor or destructor. +type GlobalCDtor struct { + Ctor bool + Key AST +} + +func (gcd *GlobalCDtor) print(ps *printState) { + ps.writeString("global ") + if gcd.Ctor { + ps.writeString("constructors") + } else { + ps.writeString("destructors") + } + ps.writeString(" keyed to ") + gcd.Key.print(ps) +} + +func (gcd *GlobalCDtor) Traverse(fn func(AST) bool) { + if fn(gcd) { + gcd.Key.Traverse(fn) + } +} + +func (gcd *GlobalCDtor) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(gcd) { + return nil + } + key := gcd.Key.Copy(fn, skip) + if key == nil { + return fn(gcd) + } + gcd = &GlobalCDtor{Ctor: gcd.Ctor, Key: key} + if r := fn(gcd); r != nil { + return r + } + return gcd +} + +func (gcd *GlobalCDtor) GoString() string { + return gcd.goString(0, "") +} + +func (gcd *GlobalCDtor) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sGlobalCDtor: Ctor: %t\n%s", indent, "", field, + gcd.Ctor, gcd.Key.goString(indent+2, "Key: ")) +} + +// TaggedName is a name with an ABI tag. +type TaggedName struct { + Name AST + Tag AST +} + +func (t *TaggedName) print(ps *printState) { + t.Name.print(ps) + ps.writeString("[abi:") + t.Tag.print(ps) + ps.writeByte(']') +} + +func (t *TaggedName) Traverse(fn func(AST) bool) { + if fn(t) { + t.Name.Traverse(fn) + t.Tag.Traverse(fn) + } +} + +func (t *TaggedName) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(t) { + return nil + } + name := t.Name.Copy(fn, skip) + tag := t.Tag.Copy(fn, skip) + if name == nil && tag == nil { + return fn(t) + } + if name == nil { + name = t.Name + } + if tag == nil { + tag = t.Tag + } + t = &TaggedName{Name: name, Tag: tag} + if r := fn(t); r != nil { + return r + } + return t +} + +func (t *TaggedName) GoString() string { + return t.goString(0, "") +} + +func (t *TaggedName) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sTaggedName:\n%s\n%s", indent, "", field, + t.Name.goString(indent+2, "Name: "), + t.Tag.goString(indent+2, "Tag: ")) +} + +// PackExpansion is a pack expansion. The Pack field may be nil. +type PackExpansion struct { + Base AST + Pack *ArgumentPack +} + +func (pe *PackExpansion) print(ps *printState) { + // We normally only get here if the simplify function was + // unable to locate and expand the pack. + if pe.Pack == nil { + parenthesize(ps, pe.Base) + ps.writeString("...") + } else { + pe.Base.print(ps) + } +} + +func (pe *PackExpansion) Traverse(fn func(AST) bool) { + if fn(pe) { + pe.Base.Traverse(fn) + // Don't traverse Template--it points elsewhere in the AST. 
+ } +} + +func (pe *PackExpansion) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(pe) { + return nil + } + base := pe.Base.Copy(fn, skip) + if base == nil { + return fn(pe) + } + pe = &PackExpansion{Base: base, Pack: pe.Pack} + if r := fn(pe); r != nil { + return r + } + return pe +} + +func (pe *PackExpansion) GoString() string { + return pe.goString(0, "") +} + +func (pe *PackExpansion) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sPackExpansion: Pack: %p\n%s", indent, "", field, + pe.Pack, pe.Base.goString(indent+2, "Base: ")) +} + +// ArgumentPack is an argument pack. +type ArgumentPack struct { + Args []AST +} + +func (ap *ArgumentPack) print(ps *printState) { + for i, a := range ap.Args { + if i > 0 { + ps.writeString(", ") + } + a.print(ps) + } +} + +func (ap *ArgumentPack) Traverse(fn func(AST) bool) { + if fn(ap) { + for _, a := range ap.Args { + a.Traverse(fn) + } + } +} + +func (ap *ArgumentPack) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(ap) { + return nil + } + args := make([]AST, len(ap.Args)) + changed := false + for i, a := range ap.Args { + ac := a.Copy(fn, skip) + if ac == nil { + args[i] = a + } else { + args[i] = ac + changed = true + } + } + if !changed { + return fn(ap) + } + ap = &ArgumentPack{Args: args} + if r := fn(ap); r != nil { + return r + } + return ap +} + +func (ap *ArgumentPack) GoString() string { + return ap.goString(0, "") +} + +func (ap *ArgumentPack) goString(indent int, field string) string { + if len(ap.Args) == 0 { + return fmt.Sprintf("%*s%sArgumentPack: nil", indent, "", field) + } + s := fmt.Sprintf("%*s%sArgumentPack:", indent, "", field) + for i, a := range ap.Args { + s += "\n" + s += a.goString(indent+2, fmt.Sprintf("%d: ", i)) + } + return s +} + +// Cast is a type cast. +type Cast struct { + To AST +} + +func (c *Cast) print(ps *printState) { + ps.writeString("operator ") + c.To.print(ps) +} + +func (c *Cast) Traverse(fn func(AST) bool) { + if fn(c) { + c.To.Traverse(fn) + } +} + +func (c *Cast) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(c) { + return nil + } + to := c.To.Copy(fn, skip) + if to == nil { + return fn(c) + } + c = &Cast{To: to} + if r := fn(c); r != nil { + return r + } + return c +} + +func (c *Cast) GoString() string { + return c.goString(0, "") +} + +func (c *Cast) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sCast\n%s", indent, "", field, + c.To.goString(indent+2, "To: ")) +} + +// The parenthesize function prints the string for val, wrapped in +// parentheses if necessary. +func parenthesize(ps *printState, val AST) { + paren := false + switch v := val.(type) { + case *Name, *InitializerList, *FunctionParam: + case *Qualified: + if v.LocalName { + paren = true + } + default: + paren = true + } + if paren { + ps.writeByte('(') + } + val.print(ps) + if paren { + ps.writeByte(')') + } +} + +// Nullary is an operator in an expression with no arguments, such as +// throw. 
+type Nullary struct { + Op AST +} + +func (n *Nullary) print(ps *printState) { + if op, ok := n.Op.(*Operator); ok { + ps.writeString(op.Name) + } else { + n.Op.print(ps) + } +} + +func (n *Nullary) Traverse(fn func(AST) bool) { + if fn(n) { + n.Op.Traverse(fn) + } +} + +func (n *Nullary) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(n) { + return nil + } + op := n.Op.Copy(fn, skip) + if op == nil { + return fn(n) + } + n = &Nullary{Op: op} + if r := fn(n); r != nil { + return r + } + return n +} + +func (n *Nullary) GoString() string { + return n.goString(0, "") +} + +func (n *Nullary) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sNullary:\n%s", indent, "", field, + n.Op.goString(indent+2, "Op: ")) +} + +// Unary is a unary operation in an expression. +type Unary struct { + Op AST + Expr AST + Suffix bool // true for ++ -- when used as postfix + SizeofType bool // true for sizeof (type) +} + +func (u *Unary) print(ps *printState) { + expr := u.Expr + + // Don't print the argument list when taking the address of a + // function. + if op, ok := u.Op.(*Operator); ok && op.Name == "&" { + if t, ok := expr.(*Typed); ok { + if _, ok := t.Type.(*FunctionType); ok { + expr = t.Name + } + } + } + + if u.Suffix { + parenthesize(ps, expr) + } + + if op, ok := u.Op.(*Operator); ok { + ps.writeString(op.Name) + } else if c, ok := u.Op.(*Cast); ok { + ps.writeByte('(') + c.To.print(ps) + ps.writeByte(')') + } else { + u.Op.print(ps) + } + + if !u.Suffix { + if op, ok := u.Op.(*Operator); ok && op.Name == "::" { + // Don't use parentheses after ::. + expr.print(ps) + } else if u.SizeofType { + // Always use parentheses for sizeof argument. + ps.writeByte('(') + expr.print(ps) + ps.writeByte(')') + } else { + parenthesize(ps, expr) + } + } +} + +func (u *Unary) Traverse(fn func(AST) bool) { + if fn(u) { + u.Op.Traverse(fn) + u.Expr.Traverse(fn) + } +} + +func (u *Unary) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(u) { + return nil + } + op := u.Op.Copy(fn, skip) + expr := u.Expr.Copy(fn, skip) + if op == nil && expr == nil { + return fn(u) + } + if op == nil { + op = u.Op + } + if expr == nil { + expr = u.Expr + } + u = &Unary{Op: op, Expr: expr, Suffix: u.Suffix, SizeofType: u.SizeofType} + if r := fn(u); r != nil { + return r + } + return u +} + +func (u *Unary) GoString() string { + return u.goString(0, "") +} + +func (u *Unary) goString(indent int, field string) string { + var s string + if u.Suffix { + s = " Suffix: true" + } + if u.SizeofType { + s += " SizeofType: true" + } + return fmt.Sprintf("%*s%sUnary:%s\n%s\n%s", indent, "", field, + s, u.Op.goString(indent+2, "Op: "), + u.Expr.goString(indent+2, "Expr: ")) +} + +// Binary is a binary operation in an expression. +type Binary struct { + Op AST + Left AST + Right AST +} + +func (b *Binary) print(ps *printState) { + op, _ := b.Op.(*Operator) + + if op != nil && strings.Contains(op.Name, "cast") { + ps.writeString(op.Name) + ps.writeByte('<') + b.Left.print(ps) + ps.writeString(">(") + b.Right.print(ps) + ps.writeByte(')') + return + } + + // Use an extra set of parentheses around an expression that + // uses the greater-than operator, so that it does not get + // confused with the '>' that ends template parameters. + if op != nil && op.Name == ">" { + ps.writeByte('(') + } + + left := b.Left + + // A function call in an expression should not print the types + // of the arguments. 
+ if op != nil && op.Name == "()" { + if ty, ok := b.Left.(*Typed); ok { + left = ty.Name + } + } + + parenthesize(ps, left) + + if op != nil && op.Name == "[]" { + ps.writeByte('[') + b.Right.print(ps) + ps.writeByte(']') + return + } + + if op != nil { + if op.Name != "()" { + ps.writeString(op.Name) + } + } else { + b.Op.print(ps) + } + + parenthesize(ps, b.Right) + + if op != nil && op.Name == ">" { + ps.writeByte(')') + } +} + +func (b *Binary) Traverse(fn func(AST) bool) { + if fn(b) { + b.Op.Traverse(fn) + b.Left.Traverse(fn) + b.Right.Traverse(fn) + } +} + +func (b *Binary) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(b) { + return nil + } + op := b.Op.Copy(fn, skip) + left := b.Left.Copy(fn, skip) + right := b.Right.Copy(fn, skip) + if op == nil && left == nil && right == nil { + return fn(b) + } + if op == nil { + op = b.Op + } + if left == nil { + left = b.Left + } + if right == nil { + right = b.Right + } + b = &Binary{Op: op, Left: left, Right: right} + if r := fn(b); r != nil { + return r + } + return b +} + +func (b *Binary) GoString() string { + return b.goString(0, "") +} + +func (b *Binary) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sBinary:\n%s\n%s\n%s", indent, "", field, + b.Op.goString(indent+2, "Op: "), + b.Left.goString(indent+2, "Left: "), + b.Right.goString(indent+2, "Right: ")) +} + +// Trinary is the ?: trinary operation in an expression. +type Trinary struct { + Op AST + First AST + Second AST + Third AST +} + +func (t *Trinary) print(ps *printState) { + parenthesize(ps, t.First) + ps.writeByte('?') + parenthesize(ps, t.Second) + ps.writeString(" : ") + parenthesize(ps, t.Third) +} + +func (t *Trinary) Traverse(fn func(AST) bool) { + if fn(t) { + t.Op.Traverse(fn) + t.First.Traverse(fn) + t.Second.Traverse(fn) + t.Third.Traverse(fn) + } +} + +func (t *Trinary) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(t) { + return nil + } + op := t.Op.Copy(fn, skip) + first := t.First.Copy(fn, skip) + second := t.Second.Copy(fn, skip) + third := t.Third.Copy(fn, skip) + if op == nil && first == nil && second == nil && third == nil { + return fn(t) + } + if op == nil { + op = t.Op + } + if first == nil { + first = t.First + } + if second == nil { + second = t.Second + } + if third == nil { + third = t.Third + } + t = &Trinary{Op: op, First: first, Second: second, Third: third} + if r := fn(t); r != nil { + return r + } + return t +} + +func (t *Trinary) GoString() string { + return t.goString(0, "") +} + +func (t *Trinary) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sTrinary:\n%s\n%s\n%s\n%s", indent, "", field, + t.Op.goString(indent+2, "Op: "), + t.First.goString(indent+2, "First: "), + t.Second.goString(indent+2, "Second: "), + t.Third.goString(indent+2, "Third: ")) +} + +// New is a use of operator new in an expression. +type New struct { + Op AST + Place AST + Type AST + Init AST +} + +func (n *New) print(ps *printState) { + // Op doesn't really matter for printing--we always print "new". 
+ ps.writeString("new ") + if n.Place != nil { + parenthesize(ps, n.Place) + ps.writeByte(' ') + } + n.Type.print(ps) + if n.Init != nil { + parenthesize(ps, n.Init) + } +} + +func (n *New) Traverse(fn func(AST) bool) { + if fn(n) { + n.Op.Traverse(fn) + n.Place.Traverse(fn) + n.Type.Traverse(fn) + n.Init.Traverse(fn) + } +} + +func (n *New) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(n) { + return nil + } + op := n.Op.Copy(fn, skip) + var place AST + if n.Place != nil { + place = n.Place.Copy(fn, skip) + } + typ := n.Type.Copy(fn, skip) + var ini AST + if n.Init != nil { + ini = n.Init.Copy(fn, skip) + } + if op == nil && place == nil && typ == nil && ini == nil { + return fn(n) + } + if op == nil { + op = n.Op + } + if place == nil { + place = n.Place + } + if typ == nil { + typ = n.Type + } + if ini == nil { + ini = n.Init + } + n = &New{Op: op, Place: place, Type: typ, Init: ini} + if r := fn(n); r != nil { + return r + } + return n +} + +func (n *New) GoString() string { + return n.goString(0, "") +} + +func (n *New) goString(indent int, field string) string { + var place string + if n.Place == nil { + place = fmt.Sprintf("%*sPlace: nil", indent, "") + } else { + place = n.Place.goString(indent+2, "Place: ") + } + var ini string + if n.Init == nil { + ini = fmt.Sprintf("%*sInit: nil", indent, "") + } else { + ini = n.Init.goString(indent+2, "Init: ") + } + return fmt.Sprintf("%*s%sNew:\n%s\n%s\n%s\n%s", indent, "", field, + n.Op.goString(indent+2, "Op: "), place, + n.Type.goString(indent+2, "Type: "), ini) +} + +// Literal is a literal in an expression. +type Literal struct { + Type AST + Val string + Neg bool +} + +// Suffixes to use for constants of the given integer type. +var builtinTypeSuffix = map[string]string{ + "int": "", + "unsigned int": "u", + "long": "l", + "unsigned long": "ul", + "long long": "ll", + "unsigned long long": "ull", +} + +// Builtin float types. +var builtinTypeFloat = map[string]bool{ + "double": true, + "long double": true, + "float": true, + "__float128": true, + "half": true, +} + +func (l *Literal) print(ps *printState) { + isFloat := false + if b, ok := l.Type.(*BuiltinType); ok { + if suffix, ok := builtinTypeSuffix[b.Name]; ok { + if l.Neg { + ps.writeByte('-') + } + ps.writeString(l.Val) + ps.writeString(suffix) + return + } else if b.Name == "bool" && !l.Neg { + switch l.Val { + case "0": + ps.writeString("false") + return + case "1": + ps.writeString("true") + return + } + } else { + isFloat = builtinTypeFloat[b.Name] + } + } + + ps.writeByte('(') + l.Type.print(ps) + ps.writeByte(')') + + if isFloat { + ps.writeByte('[') + } + if l.Neg { + ps.writeByte('-') + } + ps.writeString(l.Val) + if isFloat { + ps.writeByte(']') + } +} + +func (l *Literal) Traverse(fn func(AST) bool) { + if fn(l) { + l.Type.Traverse(fn) + } +} + +func (l *Literal) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(l) { + return nil + } + typ := l.Type.Copy(fn, skip) + if typ == nil { + return fn(l) + } + l = &Literal{Type: typ, Val: l.Val, Neg: l.Neg} + if r := fn(l); r != nil { + return r + } + return l +} + +func (l *Literal) GoString() string { + return l.goString(0, "") +} + +func (l *Literal) goString(indent int, field string) string { + var neg string + if l.Neg { + neg = " Neg: true" + } + return fmt.Sprintf("%*s%sLiteral:%s\n%s\n%*sVal: %s", indent, "", field, + neg, l.Type.goString(indent+2, "Type: "), + indent+2, "", l.Val) +} + +// ExprList is a list of expressions, typically arguments to a +// function call in an expression. 
+type ExprList struct { + Exprs []AST +} + +func (el *ExprList) print(ps *printState) { + for i, e := range el.Exprs { + if i > 0 { + ps.writeString(", ") + } + e.print(ps) + } +} + +func (el *ExprList) Traverse(fn func(AST) bool) { + if fn(el) { + for _, e := range el.Exprs { + e.Traverse(fn) + } + } +} + +func (el *ExprList) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(el) { + return nil + } + exprs := make([]AST, len(el.Exprs)) + changed := false + for i, e := range el.Exprs { + ec := e.Copy(fn, skip) + if ec == nil { + exprs[i] = e + } else { + exprs[i] = ec + changed = true + } + } + if !changed { + return fn(el) + } + el = &ExprList{Exprs: exprs} + if r := fn(el); r != nil { + return r + } + return el +} + +func (el *ExprList) GoString() string { + return el.goString(0, "") +} + +func (el *ExprList) goString(indent int, field string) string { + if len(el.Exprs) == 0 { + return fmt.Sprintf("%*s%sExprList: nil", indent, "", field) + } + s := fmt.Sprintf("%*s%sExprList:", indent, "", field) + for i, e := range el.Exprs { + s += "\n" + s += e.goString(indent+2, fmt.Sprintf("%d: ", i)) + } + return s +} + +// InitializerList is an initializer list: an optional type with a +// list of expressions. +type InitializerList struct { + Type AST + Exprs AST +} + +func (il *InitializerList) print(ps *printState) { + if il.Type != nil { + il.Type.print(ps) + } + ps.writeByte('{') + il.Exprs.print(ps) + ps.writeByte('}') +} + +func (il *InitializerList) Traverse(fn func(AST) bool) { + if fn(il) { + il.Type.Traverse(fn) + il.Exprs.Traverse(fn) + } +} + +func (il *InitializerList) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(il) { + return nil + } + var typ AST + if il.Type != nil { + typ = il.Type.Copy(fn, skip) + } + exprs := il.Exprs.Copy(fn, skip) + if typ == nil && exprs == nil { + return fn(il) + } + if typ == nil { + typ = il.Type + } + if exprs == nil { + exprs = il.Exprs + } + il = &InitializerList{Type: typ, Exprs: exprs} + if r := fn(il); r != nil { + return r + } + return il +} + +func (il *InitializerList) GoString() string { + return il.goString(0, "") +} + +func (il *InitializerList) goString(indent int, field string) string { + var t string + if il.Type == nil { + t = fmt.Sprintf("%*sType: nil", indent+2, "") + } else { + t = il.Type.goString(indent+2, "Type: ") + } + return fmt.Sprintf("%*s%sInitializerList:\n%s\n%s", indent, "", field, + t, il.Exprs.goString(indent+2, "Exprs: ")) +} + +// DefaultArg holds a default argument for a local name. +type DefaultArg struct { + Num int + Arg AST +} + +func (da *DefaultArg) print(ps *printState) { + fmt.Fprintf(&ps.buf, "{default arg#%d}::", da.Num+1) + da.Arg.print(ps) +} + +func (da *DefaultArg) Traverse(fn func(AST) bool) { + if fn(da) { + da.Arg.Traverse(fn) + } +} + +func (da *DefaultArg) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(da) { + return nil + } + arg := da.Arg.Copy(fn, skip) + if arg == nil { + return fn(da) + } + da = &DefaultArg{Num: da.Num, Arg: arg} + if r := fn(da); r != nil { + return r + } + return da +} + +func (da *DefaultArg) GoString() string { + return da.goString(0, "") +} + +func (da *DefaultArg) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sDefaultArg: Num: %d\n%s", indent, "", field, da.Num, + da.Arg.goString(indent+2, "Arg: ")) +} + +// Closure is a closure, or lambda expression. 
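ExprList and InitializerList compose in the obvious way when printed: the list joins its elements with ", " and the initializer list wraps them in braces after the optional type. A quick hand-built check (illustrative; hypothetical helper inside package demangle):

package demangle

import "fmt"

// exprListSketch prints a bare expression list and the same list wrapped
// in a brace-enclosed initializer list with no type.
func exprListSketch() {
	exprs := &ExprList{Exprs: []AST{&Name{Name: "1"}, &Name{Name: "2"}, &Name{Name: "3"}}}
	il := &InitializerList{Type: nil, Exprs: exprs}
	fmt.Println(ASTToString(exprs)) // "1, 2, 3"
	fmt.Println(ASTToString(il))    // "{1, 2, 3}"
}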
+type Closure struct { + Types []AST + Num int +} + +func (cl *Closure) print(ps *printState) { + ps.writeString("{lambda(") + for i, t := range cl.Types { + if i > 0 { + ps.writeString(", ") + } + t.print(ps) + } + ps.writeString(fmt.Sprintf(")#%d}", cl.Num+1)) +} + +func (cl *Closure) Traverse(fn func(AST) bool) { + if fn(cl) { + for _, t := range cl.Types { + t.Traverse(fn) + } + } +} + +func (cl *Closure) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(cl) { + return nil + } + types := make([]AST, len(cl.Types)) + changed := false + for i, t := range cl.Types { + tc := t.Copy(fn, skip) + if tc == nil { + types[i] = t + } else { + types[i] = tc + changed = true + } + } + if !changed { + return fn(cl) + } + cl = &Closure{Types: types, Num: cl.Num} + if r := fn(cl); r != nil { + return r + } + return cl +} + +func (cl *Closure) GoString() string { + return cl.goString(0, "") +} + +func (cl *Closure) goString(indent int, field string) string { + var types string + if len(cl.Types) == 0 { + types = fmt.Sprintf("%*sTypes: nil", indent+2, "") + } else { + types = fmt.Sprintf("%*sTypes:", indent+2, "") + for i, t := range cl.Types { + types += "\n" + types += t.goString(indent+4, fmt.Sprintf("%d: ", i)) + } + } + return fmt.Sprintf("%*s%sClosure: Num: %d\n%s", indent, "", field, cl.Num, types) +} + +// UnnamedType is an unnamed type, that just has an index. +type UnnamedType struct { + Num int +} + +func (ut *UnnamedType) print(ps *printState) { + ps.writeString(fmt.Sprintf("{unnamed type#%d}", ut.Num+1)) +} + +func (ut *UnnamedType) Traverse(fn func(AST) bool) { + fn(ut) +} + +func (ut *UnnamedType) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(ut) { + return nil + } + return fn(ut) +} + +func (ut *UnnamedType) GoString() string { + return ut.goString(0, "") +} + +func (ut *UnnamedType) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sUnnamedType: Num: %d", indent, "", field, ut.Num) +} + +// Clone is a clone of a function, with a distinguishing suffix. +type Clone struct { + Base AST + Suffix string +} + +func (c *Clone) print(ps *printState) { + c.Base.print(ps) + ps.writeString(fmt.Sprintf(" [clone %s]", c.Suffix)) +} + +func (c *Clone) Traverse(fn func(AST) bool) { + if fn(c) { + c.Base.Traverse(fn) + } +} + +func (c *Clone) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(c) { + return nil + } + base := c.Base.Copy(fn, skip) + if base == nil { + return fn(c) + } + c = &Clone{Base: base, Suffix: c.Suffix} + if r := fn(c); r != nil { + return r + } + return c +} + +func (c *Clone) GoString() string { + return c.goString(0, "") +} + +func (c *Clone) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sClone: Suffix: %s\n%s", indent, "", field, + c.Suffix, c.Base.goString(indent+2, "Base: ")) +} + +// Special is a special symbol, printed as a prefix plus another +// value. 
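Closure, UnnamedType and Clone all print as a fixed wrapper around their payload, with the zero-based Num rendered one-based. A small illustrative check (hypothetical helper, assumed to sit in package demangle):

package demangle

import "fmt"

// wrapperNodesSketch shows the fixed output shapes of Closure, UnnamedType
// and Clone.
func wrapperNodesSketch() {
	cl := &Closure{Types: []AST{&BuiltinType{Name: "int"}}, Num: 0}
	ut := &UnnamedType{Num: 1}
	cn := &Clone{Base: &Name{Name: "f()"}, Suffix: ".constprop.0"}
	fmt.Println(ASTToString(cl)) // "{lambda(int)#1}"
	fmt.Println(ASTToString(ut)) // "{unnamed type#2}"
	fmt.Println(ASTToString(cn)) // "f() [clone .constprop.0]"
}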
+type Special struct { + Prefix string + Val AST +} + +func (s *Special) print(ps *printState) { + ps.writeString(s.Prefix) + s.Val.print(ps) +} + +func (s *Special) Traverse(fn func(AST) bool) { + if fn(s) { + s.Val.Traverse(fn) + } +} + +func (s *Special) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(s) { + return nil + } + val := s.Val.Copy(fn, skip) + if val == nil { + return fn(s) + } + s = &Special{Prefix: s.Prefix, Val: val} + if r := fn(s); r != nil { + return r + } + return s +} + +func (s *Special) GoString() string { + return s.goString(0, "") +} + +func (s *Special) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sSpecial: Prefix: %s\n%s", indent, "", field, + s.Prefix, s.Val.goString(indent+2, "Val: ")) +} + +// Special2 is like special, but uses two values. +type Special2 struct { + Prefix string + Val1 AST + Middle string + Val2 AST +} + +func (s *Special2) print(ps *printState) { + ps.writeString(s.Prefix) + s.Val1.print(ps) + ps.writeString(s.Middle) + s.Val2.print(ps) +} + +func (s *Special2) Traverse(fn func(AST) bool) { + if fn(s) { + s.Val1.Traverse(fn) + s.Val2.Traverse(fn) + } +} + +func (s *Special2) Copy(fn func(AST) AST, skip func(AST) bool) AST { + if skip(s) { + return nil + } + val1 := s.Val1.Copy(fn, skip) + val2 := s.Val2.Copy(fn, skip) + if val1 == nil && val2 == nil { + return fn(s) + } + if val1 == nil { + val1 = s.Val1 + } + if val2 == nil { + val2 = s.Val2 + } + s = &Special2{Prefix: s.Prefix, Val1: val1, Middle: s.Middle, Val2: val2} + if r := fn(s); r != nil { + return r + } + return s +} + +func (s *Special2) GoString() string { + return s.goString(0, "") +} + +func (s *Special2) goString(indent int, field string) string { + return fmt.Sprintf("%*s%sSpecial2: Prefix: %s\n%s\n%*sMiddle: %s\n%s", indent, "", field, + s.Prefix, s.Val1.goString(indent+2, "Val1: "), + indent+2, "", s.Middle, s.Val2.goString(indent+2, "Val2: ")) +} + +// Print the inner types. +func (ps *printState) printInner(prefixOnly bool) []AST { + var save []AST + var psave *[]AST + if prefixOnly { + psave = &save + } + for len(ps.inner) > 0 { + ps.printOneInner(psave) + } + return save +} + +// innerPrinter is an interface for types that can print themselves as +// inner types. +type innerPrinter interface { + printInner(*printState) +} + +// Print the most recent inner type. If save is not nil, only print +// prefixes. +func (ps *printState) printOneInner(save *[]AST) { + if len(ps.inner) == 0 { + panic("printOneInner called with no inner types") + } + ln := len(ps.inner) + a := ps.inner[ln-1] + ps.inner = ps.inner[:ln-1] + + if save != nil { + if _, ok := a.(*MethodWithQualifiers); ok { + *save = append(*save, a) + return + } + } + + if ip, ok := a.(innerPrinter); ok { + ip.printInner(ps) + } else { + a.print(ps) + } +} + +// isEmpty returns whether printing a will not print anything. +func (ps *printState) isEmpty(a AST) bool { + switch a := a.(type) { + case *ArgumentPack: + return len(a.Args) == 0 + case *ExprList: + return len(a.Exprs) == 0 + case *PackExpansion: + return a.Pack != nil && ps.isEmpty(a.Base) + default: + return false + } +} diff --git a/third_party/src/golang/demangle/ast_test.go b/third_party/src/golang/demangle/ast_test.go new file mode 100644 index 00000000..b5540616 --- /dev/null +++ b/third_party/src/golang/demangle/ast_test.go @@ -0,0 +1,42 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
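All of the node types above share the same Traverse/Copy contract: Traverse stops descending when the callback returns false, and Copy returns nil when nothing beneath a node changed, so callers rebuild only the paths they touch. A sketch of the caller side (illustrative; the symbol is just a plausible example and the helper is hypothetical):

package demangle

import "fmt"

// traverseSketch counts the nodes in a demangled AST using the Traverse
// contract described above.
func traverseSketch() {
	a, err := ToAST("_ZN3foo3barEv") // expected to demangle as "foo::bar()"
	if err != nil {
		fmt.Println(err)
		return
	}
	nodes := 0
	a.Traverse(func(AST) bool {
		nodes++
		return true // returning false would skip the children
	})
	fmt.Printf("%s has %d AST nodes\n", ASTToString(a), nodes)
}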
+ +package demangle + +import ( + "fmt" + "testing" +) + +func TestASTToString(t *testing.T) { + var tests = []struct { + input AST + want string + formatted string + }{ + { + &Qualified{Scope: &Name{Name: "s"}, Name: &Name{Name: "C"}}, + "s::C", + `Qualified: + Scope: s + Name: C`, + }, + { + &Typed{Name: &Name{Name: "v"}, Type: &BuiltinType{"int"}}, + "int v", + `Typed: + Name: v + Type: BuiltinType: int`, + }, + } + + for i, test := range tests { + if got := ASTToString(test.input); got != test.want { + t.Errorf("ASTToString of test %d == %s, want %s", i, test.input, test.want) + } + if got := fmt.Sprintf("%#v", test.input); got != test.formatted { + t.Errorf("Formatted test %d == %s, want %s", i, got, test.formatted) + } + } +} diff --git a/third_party/src/golang/demangle/c++filt.go b/third_party/src/golang/demangle/c++filt.go new file mode 100644 index 00000000..7ba817c9 --- /dev/null +++ b/third_party/src/golang/demangle/c++filt.go @@ -0,0 +1,144 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build ignore + +// This is a program that works like the GNU c++filt program. +// It's here for testing purposes and as an example. + +package main + +import ( + "bufio" + "flag" + "fmt" + "io" + "os" + "strings" + "unicode" + + "github.com/ianlancetaylor/demangle" +) + +func flagUsage() { + usage(os.Stderr, 2) +} + +func usage(w io.Writer, status int) { + fmt.Fprintf(w, "Usage: %s [options] [mangled names]\n", os.Args[0]) + flag.CommandLine.SetOutput(w) + flag.PrintDefaults() + fmt.Fprintln(w, `Demangled names are displayed to stdout +If a name cannot be demangled it is just echoed to stdout. +If no names are provided on the command line, stdin is read.`) + os.Exit(status) +} + +var stripUnderscore = flag.Bool("_", false, "Ignore first leading underscore") +var noParams = flag.Bool("p", false, "Do not display function argument types") +var noVerbose = flag.Bool("i", false, "Do not show implementation details (if any)") +var help = flag.Bool("h", false, "Display help information") +var debug = flag.Bool("d", false, "Display debugging information for strings on command line") + +// Unimplemented c++filt flags: +// -n (opposite of -_) +// -t (demangle types) +// -s (set demangling style) +// -V (print version information) + +// Characters considered to be part of a symbol. +const symbolChars = "_$." + +func main() { + flag.Usage = func() { usage(os.Stderr, 1) } + flag.Parse() + + if *help { + usage(os.Stdout, 0) + } + + out := bufio.NewWriter(os.Stdout) + + if flag.NArg() > 0 { + for _, f := range flag.Args() { + if *debug { + a, err := demangle.ToAST(f, options()...) 
+ if err != nil { + fmt.Fprintf(os.Stderr, "%s: %v\n", f, err) + } else { + fmt.Fprintf(out, "%#v\n", a) + } + } else { + doDemangle(out, f) + } + out.WriteByte('\n') + } + if err := out.Flush(); err != nil { + fmt.Fprintln(os.Stderr, err) + os.Exit(2) + } + return + } + + scanner := bufio.NewScanner(bufio.NewReader(os.Stdin)) + for scanner.Scan() { + line := scanner.Text() + start := -1 + for i, c := range line { + if unicode.IsLetter(c) || unicode.IsNumber(c) || strings.ContainsRune(symbolChars, c) { + if start < 0 { + start = i + } + } else { + if start >= 0 { + doDemangle(out, line[start:i]) + } + out.WriteRune(c) + start = -1 + } + } + if start >= 0 { + doDemangle(out, line[start:]) + start = -1 + } + out.WriteByte('\n') + if err := out.Flush(); err != nil { + fmt.Fprintln(os.Stderr, err) + os.Exit(2) + } + } +} + +// Demangle a string just as the GNU c++filt program does. +func doDemangle(out *bufio.Writer, name string) { + skip := 0 + if name[0] == '.' || name[0] == '$' { + skip++ + } + if *stripUnderscore && name[skip] == '_' { + skip++ + } + result := demangle.Filter(name[skip:], options()...) + if result == name[skip:] { + out.WriteString(name) + } else { + if name[0] == '.' { + out.WriteByte('.') + } + out.WriteString(result) + } +} + +// options returns the demangling options to use based on the command +// line flags. +func options() []demangle.Option { + var options []demangle.Option + if *noParams { + options = append(options, demangle.NoParams) + } + if !*noVerbose { + options = append(options, demangle.Verbose) + } + return options +} diff --git a/third_party/src/golang/demangle/demangle.go b/third_party/src/golang/demangle/demangle.go new file mode 100644 index 00000000..40a59131 --- /dev/null +++ b/third_party/src/golang/demangle/demangle.go @@ -0,0 +1,2327 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package demangle defines functions that demangle GCC/LLVM C++ symbol names. +// This package recognizes names that were mangled according to the C++ ABI +// defined at http://codesourcery.com/cxx-abi/. +package demangle + +import ( + "errors" + "fmt" + "strings" +) + +// ErrNotMangledName is returned by CheckedDemangle if the string does +// not appear to be a C++ symbol name. +var ErrNotMangledName = errors.New("not a C++ mangled name") + +// Option is the type of demangler options. +type Option int + +const ( + // The NoParams option disables demangling of function parameters. + NoParams Option = iota + + // The NoTemplateParams option disables demangling of template parameters. + NoTemplateParams + + // The NoClones option disables inclusion of clone suffixes. + // NoParams implies NoClones. + NoClones + + // The Verbose option turns on more verbose demangling. + Verbose +) + +// Filter demangles a C++ symbol name, returning the human-readable C++ name. +// If any error occurs during demangling, the input string is returned. +func Filter(name string, options ...Option) string { + ret, err := ToString(name, options...) + if err != nil { + return name + } + return ret +} + +// ToString demangles a C++ symbol name, returning human-readable C++ +// name or an error. +// If the name does not appear to be a C++ symbol name at all, the +// error will be ErrNotMangledName. +func ToString(name string, options ...Option) (string, error) { + a, err := ToAST(name, options...) 
+ if err != nil { + return "", err + } + return ASTToString(a, options...), nil +} + +// ToAST demangles a C++ symbol name into an abstract syntax tree +// representing the symbol. +// If the NoParams option is passed, and the name has a function type, +// the parameter types are not demangled. +// If the name does not appear to be a C++ symbol name at all, the +// error will be ErrNotMangledName. +func ToAST(name string, options ...Option) (AST, error) { + if strings.HasPrefix(name, "_Z") { + a, err := doDemangle(name[2:], options...) + return a, adjustErr(err, 2) + } + + const prefix = "_GLOBAL_" + if strings.HasPrefix(name, prefix) { + // The standard demangler ignores NoParams for global + // constructors. We are compatible. + i := 0 + for i < len(options) { + if options[i] == NoParams { + options = append(options[:i], options[i+1:]...) + } else { + i++ + } + } + a, err := globalCDtorName(name[len(prefix):], options...) + return a, adjustErr(err, len(prefix)) + } + + return nil, ErrNotMangledName +} + +// globalCDtorName demangles a global constructor/destructor symbol name. +// The parameter is the string following the "_GLOBAL_" prefix. +func globalCDtorName(name string, options ...Option) (AST, error) { + if len(name) < 4 { + return nil, ErrNotMangledName + } + switch name[0] { + case '.', '_', '$': + default: + return nil, ErrNotMangledName + } + + var ctor bool + switch name[1] { + case 'I': + ctor = true + case 'D': + ctor = false + default: + return nil, ErrNotMangledName + } + + if name[2] != '_' { + return nil, ErrNotMangledName + } + + if !strings.HasPrefix(name[3:], "_Z") { + return &GlobalCDtor{Ctor: ctor, Key: &Name{Name: name}}, nil + } else { + a, err := doDemangle(name[5:], options...) + if err != nil { + return nil, adjustErr(err, 5) + } + return &GlobalCDtor{Ctor: ctor, Key: a}, nil + } +} + +// The doDemangle function is the entry point into the demangler proper. +func doDemangle(name string, options ...Option) (ret AST, err error) { + // When the demangling routines encounter an error, they panic + // with a value of type demangleErr. + defer func() { + if r := recover(); r != nil { + if de, ok := r.(demangleErr); ok { + ret = nil + err = de + return + } + panic(r) + } + }() + + params := true + clones := true + verbose := false + for _, o := range options { + switch o { + case NoParams: + params = false + clones = false + case NoTemplateParams: + // This is a valid option but only affect printing of the AST. + case NoClones: + clones = false + case Verbose: + verbose = true + default: + return nil, fmt.Errorf("unrecognized demangler option %v", o) + } + } + + st := &state{str: name, verbose: verbose} + a := st.encoding(params) + + // Accept a clone suffix. + if clones { + for len(st.str) > 1 && st.str[0] == '.' && (isLower(st.str[1]) || st.str[1] == '_' || isDigit(st.str[1])) { + a = st.cloneSuffix(a) + } + } + + if clones && len(st.str) > 0 { + st.fail("unparsed characters at end of mangled name") + } + + return a, nil +} + +// A state holds the current state of demangling a string. +type state struct { + str string // remainder of string to demangle + verbose bool // whether to use verbose demangling + off int // offset of str within original string + subs substitutions // substitutions + templates []*Template // templates being processed +} + +// copy returns a copy of the current state. +func (st *state) copy() *state { + n := new(state) + *n = *st + return n +} + +// fail panics with demangleErr, to be caught in doDemangle. 
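Filter, ToString and ToAST are the three public entry points layered on doDemangle: Filter swallows errors and echoes the input, ToString reports them, and options are plain varargs. A brief illustrative sketch (hypothetical helper inside package demangle; the expected outputs noted in comments should match the standard demangler):

package demangle

import "fmt"

// entryPointSketch demonstrates the Filter/ToString calling conventions
// and the effect of NoParams.
func entryPointSketch() {
	const name = "_Z3fooILi42EEvv" // a plausible template instantiation

	fmt.Println(Filter(name))           // expected: "void foo<42>()"
	fmt.Println(Filter(name, NoParams)) // expected: "foo<42>" (parameters and clone suffixes dropped)
	fmt.Println(Filter("plain text"))   // not a mangled name, echoed back unchanged

	if _, err := ToString("plain text"); err == ErrNotMangledName {
		fmt.Println("ToString reports the error instead of hiding it")
	}
}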
+func (st *state) fail(err string) { + panic(demangleErr{err: err, off: st.off}) +} + +// failEarlier is like fail, but decrements the offset to indicate +// that the point of failure occurred earlier in the string. +func (st *state) failEarlier(err string, dec int) { + if st.off < dec { + panic("internal error") + } + panic(demangleErr{err: err, off: st.off - dec}) +} + +// advance advances the current string offset. +func (st *state) advance(add int) { + if len(st.str) < add { + panic("internal error") + } + st.str = st.str[add:] + st.off += add +} + +// checkChar requires that the next character in the string be c, and +// advances past it. +func (st *state) checkChar(c byte) { + if len(st.str) == 0 || st.str[0] != c { + panic("internal error") + } + st.advance(1) +} + +// A demangleErr is an error at a specific offset in the mangled +// string. +type demangleErr struct { + err string + off int +} + +// Error implements the builtin error interface for demangleErr. +func (de demangleErr) Error() string { + return fmt.Sprintf("%s at %d", de.err, de.off) +} + +// adjustErr adjusts the position of err, if it is a demangleErr, +// and returns err. +func adjustErr(err error, adj int) error { + if err == nil { + return nil + } + if de, ok := err.(demangleErr); ok { + de.off += adj + return de + } + return err +} + +// encoding ::= <(function) name> +// <(data) name> +// +func (st *state) encoding(params bool) AST { + if len(st.str) < 1 { + st.fail("expected encoding") + } + + if st.str[0] == 'G' || st.str[0] == 'T' { + return st.specialName() + } + + a := st.name() + a = simplify(a) + + if !params { + // Don't demangle the parameters. + + // Strip CV-qualifiers, as they apply to the 'this' + // parameter, and are not output by the standard + // demangler without parameters. + if mwq, ok := a.(*MethodWithQualifiers); ok { + a = mwq.Method + } + + // If this is a local name, there may be CV-qualifiers + // on the name that really apply to the top level, and + // therefore must be discarded when discarding + // parameters. This can happen when parsing a class + // that is local to a function. + if q, ok := a.(*Qualified); ok && q.LocalName { + p := &q.Name + if da, ok := (*p).(*DefaultArg); ok { + p = &da.Arg + } + if mwq, ok := (*p).(*MethodWithQualifiers); ok { + *p = mwq.Method + } + } + + return a + } + + if len(st.str) == 0 || st.str[0] == 'E' { + // There are no parameters--this is a data symbol, not + // a function symbol. + return a + } + + check := a + mwq, _ := check.(*MethodWithQualifiers) + if mwq != nil { + check = mwq.Method + } + template, _ := check.(*Template) + if template != nil { + st.templates = append(st.templates, template) + } + + ft := st.bareFunctionType(hasReturnType(a)) + + if template != nil { + st.templates = st.templates[:len(st.templates)-1] + } + + ft = simplify(ft) + + // Any top-level qualifiers belong to the function type. + if mwq != nil { + a = mwq.Method + mwq.Method = ft + ft = mwq + } + if q, ok := a.(*Qualified); ok && q.LocalName { + p := &q.Name + if da, ok := (*p).(*DefaultArg); ok { + p = &da.Arg + } + if mwq, ok := (*p).(*MethodWithQualifiers); ok { + *p = mwq.Method + mwq.Method = ft + ft = mwq + } + } + + return &Typed{Name: a, Type: ft} +} + +// hasReturnType returns whether the mangled form of a will have a +// return type. 
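The split in encoding between data symbols (nothing follows the name) and function symbols (a bare function type follows), and the stripping of method qualifiers under NoParams, is easiest to see end to end. Illustrative sketch (hypothetical helper; expected outputs should match c++filt):

package demangle

import "fmt"

// encodingSketch contrasts a data symbol, a function symbol, and a
// const member function with and without NoParams.
func encodingSketch() {
	fmt.Println(Filter("_ZN3foo3barE"))             // data symbol: "foo::bar"
	fmt.Println(Filter("_ZN3foo3barEv"))            // function symbol: "foo::bar()"
	fmt.Println(Filter("_ZNK3foo3barEv"))           // const member function: "foo::bar() const"
	fmt.Println(Filter("_ZNK3foo3barEv", NoParams)) // NoParams drops the list and the trailing const: "foo::bar"
}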
+func hasReturnType(a AST) bool { + switch a := a.(type) { + case *Template: + return !isCDtorConversion(a.Name) + case *TypeWithQualifiers: + return hasReturnType(a.Base) + case *MethodWithQualifiers: + return hasReturnType(a.Method) + default: + return false + } +} + +// isCDtorConversion returns when an AST is a constructor, a +// destructor, or a conversion operator. +func isCDtorConversion(a AST) bool { + switch a := a.(type) { + case *Qualified: + return isCDtorConversion(a.Name) + case *Constructor, *Destructor, *Cast: + return true + default: + return false + } +} + +// ::= B +func (st *state) taggedName(a AST) AST { + for len(st.str) > 0 && st.str[0] == 'B' { + st.advance(1) + tag := st.sourceName() + a = &TaggedName{Name: a, Tag: tag} + } + return a +} + +// ::= +// ::= +// ::= +// ::= +// +// ::= +// ::= St +// +// ::= +// ::= +func (st *state) name() AST { + if len(st.str) < 1 { + st.fail("expected name") + } + switch st.str[0] { + case 'N': + return st.nestedName() + case 'Z': + return st.localName() + case 'U': + return st.unqualifiedName() + case 'S': + if len(st.str) < 2 { + st.advance(1) + st.fail("expected substitution index") + } + var a AST + subst := false + if st.str[1] == 't' { + st.advance(2) + a = st.unqualifiedName() + a = &Qualified{Scope: &Name{Name: "std"}, Name: a, LocalName: false} + } else { + a = st.substitution(false) + subst = true + } + if len(st.str) > 0 && st.str[0] == 'I' { + // This can only happen if we saw + // and are about to see + // . is a + // substitution candidate if it did not come from a + // substitution. + if !subst { + st.subs.add(a) + } + args := st.templateArgs() + a = &Template{Name: a, Args: args} + } + return a + + default: + a := st.unqualifiedName() + if len(st.str) > 0 && st.str[0] == 'I' { + st.subs.add(a) + args := st.templateArgs() + a = &Template{Name: a, Args: args} + } + return a + } +} + +// ::= N [] [] E +// ::= N [] [] E +func (st *state) nestedName() AST { + st.checkChar('N') + q := st.cvQualifiers() + r := st.refQualifier() + a := st.prefix() + if len(q) > 0 || r != "" { + a = &MethodWithQualifiers{Method: a, Qualifiers: q, RefQualifier: r} + } + if len(st.str) == 0 || st.str[0] != 'E' { + st.fail("expected E after nested name") + } + st.advance(1) + return a +} + +// ::= +// ::= +// ::= +// ::= +// ::= +// ::= +// +// ::= <(template) unqualified-name> +// ::= +// ::= +// +// ::= Dt E +// ::= DT E +func (st *state) prefix() AST { + var a AST + + // The last name seen, for a constructor/destructor. 
+ var last AST + + getLast := func(a AST) AST { + for { + if t, ok := a.(*Template); ok { + a = t.Name + } else if q, ok := a.(*Qualified); ok { + a = q.Name + } else if t, ok := a.(*TaggedName); ok { + a = t.Name + } else { + return a + } + } + } + + isCast := false + for { + if len(st.str) == 0 { + st.fail("expected prefix") + } + var next AST + + c := st.str[0] + if isDigit(c) || isLower(c) || c == 'U' || c == 'L' { + next = st.unqualifiedName() + if _, ok := next.(*Cast); ok { + isCast = true + } + } else { + switch st.str[0] { + case 'C': + if len(st.str) < 2 { + st.fail("expected constructor type") + } + if last == nil { + st.fail("constructor before name is seen") + } + st.advance(2) + next = &Constructor{Name: getLast(last)} + case 'D': + if len(st.str) > 1 && (st.str[1] == 'T' || st.str[1] == 't') { + next = st.demangleType(false) + } else { + if len(st.str) < 2 { + st.fail("expected destructor type") + } + if last == nil { + st.fail("destructor before name is seen") + } + st.advance(2) + next = &Destructor{Name: getLast(last)} + } + case 'S': + next = st.substitution(true) + case 'I': + if a == nil { + st.fail("unexpected template arguments") + } + var args []AST + args = st.templateArgs() + tmpl := &Template{Name: a, Args: args} + if isCast { + st.setTemplate(a, tmpl) + isCast = false + } + a = nil + next = tmpl + case 'T': + next = st.templateParam() + case 'E': + return a + case 'M': + if a == nil { + st.fail("unexpected lambda initializer") + } + // This is the initializer scope for a + // lambda. We don't need to record + // it. The normal code will treat the + // variable has a type scope, which + // gives appropriate output. + st.advance(1) + continue + default: + st.fail("unrecognized letter in prefix") + } + } + last = next + if a == nil { + a = next + } else { + a = &Qualified{Scope: a, Name: next, LocalName: false} + } + + if c != 'S' && (len(st.str) == 0 || st.str[0] != 'E') { + st.subs.add(a) + } + } +} + +// ::= +// ::= +// ::= +// ::= +// +// ::= L +func (st *state) unqualifiedName() AST { + if len(st.str) < 1 { + st.fail("expected unqualified name") + } + var a AST + c := st.str[0] + if isDigit(c) { + a = st.sourceName() + } else if isLower(c) { + a, _ = st.operatorName(false) + if op, ok := a.(*Operator); ok && op.Name == `operator"" ` { + n := st.sourceName() + a = &Unary{Op: op, Expr: n, Suffix: false, SizeofType: false} + } + } else { + switch c { + case 'C', 'D': + st.fail("constructor/destructor not in nested name") + case 'L': + st.advance(1) + a = st.sourceName() + a = st.discriminator(a) + case 'U': + if len(st.str) < 2 { + st.advance(1) + st.fail("expected closure or unnamed type") + } + c := st.str[1] + switch c { + case 'l': + a = st.closureTypeName() + case 't': + a = st.unnamedTypeName() + default: + st.advance(1) + st.fail("expected closure or unnamed type") + } + default: + st.fail("expected unqualified name") + } + } + + if len(st.str) > 0 && st.str[0] == 'B' { + a = st.taggedName(a) + } + + return a +} + +// ::= <(positive length) number> +// identifier ::= <(unqualified source code identifier)> +func (st *state) sourceName() AST { + val := st.number() + if val < 0 { + st.fail("unexpected negative number") + } + if len(st.str) < val { + st.fail("not enough characters for identifier") + } + id := st.str[:val] + st.advance(val) + + // Look for GCC encoding of anonymous namespace, and make it + // more friendly. 
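+ // For example (illustrative): GCC emits the component "_GLOBAL__N_1" for
+ // an anonymous namespace, so a symbol such as _ZN12_GLOBAL__N_11fEv
+ // should demangle to "(anonymous namespace)::f()".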
+ const anonPrefix = "_GLOBAL_" + if strings.HasPrefix(id, anonPrefix) && len(id) > len(anonPrefix)+2 { + c1 := id[len(anonPrefix)] + c2 := id[len(anonPrefix)+1] + if (c1 == '.' || c1 == '_' || c1 == '$') && c2 == 'N' { + id = "(anonymous namespace)" + } + } + + n := &Name{Name: id} + return n +} + +// number ::= [n] <(non-negative decimal integer)> +func (st *state) number() int { + neg := false + if len(st.str) > 0 && st.str[0] == 'n' { + neg = true + st.advance(1) + } + if len(st.str) == 0 || !isDigit(st.str[0]) { + st.fail("missing number") + } + val := 0 + for len(st.str) > 0 && isDigit(st.str[0]) { + // Number picked to ensure we can't overflow with 32-bit int. + // Any very large number here is bogus. + if val >= 0x80000000/10-10 { + st.fail("numeric overflow") + } + val = val*10 + int(st.str[0]-'0') + st.advance(1) + } + if neg { + val = -val + } + return val +} + +// An operator is the demangled name, and the number of arguments it +// takes in an expression. +type operator struct { + name string + args int +} + +// The operators map maps the mangled operator names to information +// about them. +var operators = map[string]operator{ + "aN": {"&=", 2}, + "aS": {"=", 2}, + "aa": {"&&", 2}, + "ad": {"&", 1}, + "an": {"&", 2}, + "at": {"alignof ", 1}, + "az": {"alignof ", 1}, + "cc": {"const_cast", 2}, + "cl": {"()", 2}, + "cm": {",", 2}, + "co": {"~", 1}, + "dV": {"/=", 2}, + "da": {"delete[] ", 1}, + "dc": {"dynamic_cast", 2}, + "de": {"*", 1}, + "dl": {"delete ", 1}, + "ds": {".*", 2}, + "dt": {".", 2}, + "dv": {"/", 2}, + "eO": {"^=", 2}, + "eo": {"^", 2}, + "eq": {"==", 2}, + "ge": {">=", 2}, + "gs": {"::", 1}, + "gt": {">", 2}, + "ix": {"[]", 2}, + "lS": {"<<=", 2}, + "le": {"<=", 2}, + "li": {`operator"" `, 1}, + "ls": {"<<", 2}, + "lt": {"<", 2}, + "mI": {"-=", 2}, + "mL": {"*=", 2}, + "mi": {"-", 2}, + "ml": {"*", 2}, + "mm": {"--", 1}, + "na": {"new[]", 3}, + "ne": {"!=", 2}, + "ng": {"-", 1}, + "nt": {"!", 1}, + "nw": {"new", 3}, + "oR": {"|=", 2}, + "oo": {"||", 2}, + "or": {"|", 2}, + "pL": {"+=", 2}, + "pl": {"+", 2}, + "pm": {"->*", 2}, + "pp": {"++", 1}, + "ps": {"+", 1}, + "pt": {"->", 2}, + "qu": {"?", 3}, + "rM": {"%=", 2}, + "rS": {">>=", 2}, + "rc": {"reinterpret_cast", 2}, + "rm": {"%", 2}, + "rs": {">>", 2}, + "sc": {"static_cast", 2}, + "st": {"sizeof ", 1}, + "sz": {"sizeof ", 1}, + "tr": {"throw", 0}, + "tw": {"throw ", 1}, +} + +// operator_name ::= many different two character encodings. +// ::= cv +// ::= v +// +// We need to know whether we are in an expression because it affects +// how we handle template parameters in the type of a cast operator. +func (st *state) operatorName(inExpression bool) (AST, int) { + if len(st.str) < 2 { + st.fail("missing operator code") + } + code := st.str[:2] + st.advance(2) + if code[0] == 'v' && isDigit(code[1]) { + name := st.sourceName() + return &Operator{Name: name.(*Name).Name}, int(code[1] - '0') + } else if code == "cv" { + // Push a nil on templates to indicate that template + // parameters will have their template filled in + // later. 
+ if !inExpression { + st.templates = append(st.templates, nil) + } + + t := st.demangleType(!inExpression) + + if !inExpression { + st.templates = st.templates[:len(st.templates)-1] + } + + return &Cast{To: t}, 1 + } else if op, ok := operators[code]; ok { + return &Operator{Name: op.name}, op.args + } else { + st.failEarlier("unrecognized operator code", 2) + panic("not reached") + } +} + +// ::= Z <(function) encoding> E <(entity) name> [] +// ::= Z <(function) encoding> E s [] +// ::= Z <(function) encoding> E d [ number>] _ +func (st *state) localName() AST { + st.checkChar('Z') + fn := st.encoding(true) + if len(st.str) == 0 || st.str[0] != 'E' { + st.fail("expected E after local name") + } + st.advance(1) + if len(st.str) > 0 && st.str[0] == 's' { + st.advance(1) + var n AST = &Name{Name: "string literal"} + n = st.discriminator(n) + return &Qualified{Scope: fn, Name: n, LocalName: true} + } else { + num := -1 + if len(st.str) > 0 && st.str[0] == 'd' { + // Default argument scope. + st.advance(1) + num = st.compactNumber() + } + var n AST = st.name() + n = st.discriminator(n) + if num >= 0 { + n = &DefaultArg{Num: num, Arg: n} + } + return &Qualified{Scope: fn, Name: n, LocalName: true} + } +} + +// Parse a Java resource special-name. +func (st *state) javaResource() AST { + off := st.off + ln := st.number() + if ln <= 1 { + st.failEarlier("java resource length less than 1", st.off-off) + } + if len(st.str) == 0 || st.str[0] != '_' { + st.fail("expected _ after number") + } + st.advance(1) + ln-- + if len(st.str) < ln { + st.fail("not enough characters for java resource length") + } + str := st.str[:ln] + final := "" + st.advance(ln) + for i := 0; i < len(str); i++ { + if str[i] != '$' { + final += string(str[i]) + } else { + if len(str) <= i+1 { + st.failEarlier("java resource escape at end of string", 1) + } + i++ + r, ok := map[byte]string{ + 'S': "/", + '_': ".", + '$': "$", + }[str[i]] + if !ok { + st.failEarlier("unrecognized java resource escape", ln-i-1) + } + final += r + } + } + return &Special{Prefix: "java resource ", Val: &Name{Name: final}} +} + +// ::= TV +// ::= TT +// ::= TI +// ::= TS +// ::= GV <(object) name> +// ::= T <(base) encoding> +// ::= Tc <(base) encoding> +// Also g++ extensions: +// ::= TC <(offset) number> _ <(base) type> +// ::= TF +// ::= TJ +// ::= GR +// ::= GA +// ::= Gr +// ::= GTt +// ::= GTn +func (st *state) specialName() AST { + if st.str[0] == 'T' { + st.advance(1) + if len(st.str) == 0 { + st.fail("expected special name code") + } + c := st.str[0] + st.advance(1) + switch c { + case 'V': + t := st.demangleType(false) + return &Special{Prefix: "vtable for ", Val: t} + case 'T': + t := st.demangleType(false) + return &Special{Prefix: "VTT for ", Val: t} + case 'I': + t := st.demangleType(false) + return &Special{Prefix: "typeinfo for ", Val: t} + case 'S': + t := st.demangleType(false) + return &Special{Prefix: "typeinfo name for ", Val: t} + case 'h': + st.callOffset('h') + v := st.encoding(true) + return &Special{Prefix: "non-virtual thunk to ", Val: v} + case 'v': + st.callOffset('v') + v := st.encoding(true) + return &Special{Prefix: "virtual thunk to ", Val: v} + case 'c': + st.callOffset(0) + st.callOffset(0) + v := st.encoding(true) + return &Special{Prefix: "covariant return thunk to ", Val: v} + case 'C': + derived := st.demangleType(false) + off := st.off + offset := st.number() + if offset < 0 { + st.failEarlier("expected positive offset", st.off-off) + } + if len(st.str) == 0 || st.str[0] != '_' { + st.fail("expected _ after 
number") + } + st.advance(1) + base := st.demangleType(false) + return &Special2{Prefix: "construction vtable for ", Val1: base, Middle: "-in-", Val2: derived} + case 'F': + t := st.demangleType(false) + return &Special{Prefix: "typeinfo fn for ", Val: t} + case 'J': + t := st.demangleType(false) + return &Special{Prefix: "java Class for ", Val: t} + case 'H': + n := st.name() + return &Special{Prefix: "TLS init function for ", Val: n} + case 'W': + n := st.name() + return &Special{Prefix: "TLS wrapper function for ", Val: n} + default: + st.fail("unrecognized special T name code") + panic("not reached") + } + } else { + st.checkChar('G') + if len(st.str) == 0 { + st.fail("expected special name code") + } + c := st.str[0] + st.advance(1) + switch c { + case 'V': + n := st.name() + return &Special{Prefix: "guard variable for ", Val: n} + case 'R': + n := st.name() + i := st.number() + return &Special{Prefix: fmt.Sprintf("reference temporary #%d for ", i), Val: n} + case 'A': + v := st.encoding(true) + return &Special{Prefix: "hidden alias for ", Val: v} + case 'T': + if len(st.str) == 0 { + st.fail("expected special GT name code") + } + c := st.str[0] + st.advance(1) + v := st.encoding(true) + switch c { + case 'n': + return &Special{Prefix: "non-transaction clone for ", Val: v} + default: + // The proposal is that different + // letters stand for different types + // of transactional cloning. Treat + // them all the same for now. + fallthrough + case 't': + return &Special{Prefix: "transaction clone for ", Val: v} + } + case 'r': + return st.javaResource() + default: + st.fail("unrecognized special G name code") + panic("not reached") + } + } +} + +// ::= h _ +// ::= v _ +// +// ::= <(offset) number> +// +// ::= <(offset) number> _ <(virtual offset) number> +// +// The c parameter, if not 0, is a character we just read which is the +// start of the . +// +// We don't display the offset information anywhere. +func (st *state) callOffset(c byte) { + if c == 0 { + if len(st.str) == 0 { + st.fail("missing call offset") + } + c = st.str[0] + st.advance(1) + } + switch c { + case 'h': + st.number() + case 'v': + st.number() + if len(st.str) == 0 || st.str[0] != '_' { + st.fail("expected _ after number") + } + st.advance(1) + st.number() + default: + st.failEarlier("unrecognized call offset code", 1) + } + if len(st.str) == 0 || st.str[0] != '_' { + st.fail("expected _ after call offset") + } + st.advance(1) +} + +// builtinTypes maps the type letter to the type name. +var builtinTypes = map[byte]string{ + 'a': "signed char", + 'b': "bool", + 'c': "char", + 'd': "double", + 'e': "long double", + 'f': "float", + 'g': "__float128", + 'h': "unsigned char", + 'i': "int", + 'j': "unsigned int", + 'l': "long", + 'm': "unsigned long", + 'n': "__int128", + 'o': "unsigned __int128", + 's': "short", + 't': "unsigned short", + 'v': "void", + 'w': "wchar_t", + 'x': "long long", + 'y': "unsigned long long", + 'z': "...", +} + +// ::= +// ::= +// ::= +// ::= +// ::= +// ::= +// ::= +// ::= +// ::= +// ::= P +// ::= R +// ::= O (C++0x) +// ::= C +// ::= G +// ::= U +// +// ::= various one letter codes +// ::= u +// +// The isCast parameter is for a rather hideous parse. When we see a +// tempate-param followed by a template-args, we need to decide +// whether we have a template-param or a template-template-param. +// Normally it is template-template-param, meaning that we pick up the +// template arguments here. 
But, if we are parsing the type for a +// cast operator, then the only way this can be +// template-template-param is if there is another set of template-args +// immediately after this set. That would look like this: +// +// +// -> +// -> +// -> +// -> +// -> +// -> cv +// -> cv +// +// Otherwise, we have this derivation: +// +// +// -> +// -> +// -> +// -> +// -> +// -> cv +// -> cv +// +// in which the template-args are actually part of the prefix. For +// the special case where this arises, we pass in isCast as true. +// This function is then responsible for checking whether we see +// but there is not another following +// . In that case, we reset the parse and just return +// the . +func (st *state) demangleType(isCast bool) AST { + if len(st.str) == 0 { + st.fail("expected type") + } + + addSubst := true + + q := st.cvQualifiers() + if len(q) > 0 { + if len(st.str) == 0 { + st.fail("expected type") + } + + // CV-qualifiers before a function type apply to + // 'this', so avoid adding the unqualified function + // type to the substitution list. + if st.str[0] == 'F' { + addSubst = false + } + } + + var ret AST + + // Use correct substitution for a template parameter. + var sub AST + + if btype, ok := builtinTypes[st.str[0]]; ok { + ret = &BuiltinType{Name: btype} + st.advance(1) + if len(q) > 0 { + ret = &TypeWithQualifiers{Base: ret, Qualifiers: q} + st.subs.add(ret) + } + return ret + } + c := st.str[0] + switch c { + case 'u': + st.advance(1) + ret = st.sourceName() + case 'F': + ret = st.functionType() + case 'N', 'Z', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + ret = st.name() + case 'A': + ret = st.arrayType(isCast) + case 'M': + ret = st.pointerToMemberType(isCast) + case 'T': + ret = st.templateParam() + if len(st.str) > 0 && st.str[0] == 'I' { + // See the function comment to explain this. + if !isCast { + st.subs.add(ret) + args := st.templateArgs() + ret = &Template{Name: ret, Args: args} + } else { + save := st.copy() + + var args []AST + failed := false + func() { + defer func() { + if r := recover(); r != nil { + if _, ok := r.(demangleErr); ok { + failed = true + } else { + panic(r) + } + } + }() + + args = st.templateArgs() + }() + + if !failed && len(st.str) > 0 && st.str[0] == 'I' { + st.subs.add(ret) + ret = &Template{Name: ret, Args: args} + } else { + // Reset back to before we started + // reading the template arguments. + // They will be read again by + // st.prefix. + *st = *save + } + } + } + case 'S': + // If this is a special substitution, then it + // is the start of . + var c2 byte + if len(st.str) > 1 { + c2 = st.str[1] + } + if isDigit(c2) || c2 == '_' || isUpper(c2) { + ret = st.substitution(false) + if len(st.str) == 0 || st.str[0] != 'I' { + addSubst = false + } else { + args := st.templateArgs() + ret = &Template{Name: ret, Args: args} + } + } else { + ret = st.name() + // This substitution is not itself a + // substitution candidate, unless template + // arguments were added. 
+ if ret == subAST[c2] || ret == verboseAST[c2] { + addSubst = false + } + } + case 'O', 'P', 'R', 'C', 'G': + st.advance(1) + t := st.demangleType(isCast) + switch c { + case 'O': + ret = &RvalueReferenceType{Base: t} + case 'P': + ret = &PointerType{Base: t} + case 'R': + ret = &ReferenceType{Base: t} + case 'C': + ret = &ComplexType{Base: t} + case 'G': + ret = &ImaginaryType{Base: t} + } + case 'U': + st.advance(1) + n := st.sourceName() + if len(st.str) > 0 && st.str[0] == 'I' { + args := st.templateArgs() + n = &Template{Name: n, Args: args} + } + t := st.demangleType(isCast) + ret = &VendorQualifier{Qualifier: n, Type: t} + case 'D': + st.advance(1) + if len(st.str) == 0 { + st.fail("expected D code for type") + } + addSubst = false + c2 := st.str[0] + st.advance(1) + switch c2 { + case 'T', 't': + // decltype(expression) + ret = st.expression() + if len(st.str) == 0 || st.str[0] != 'E' { + st.fail("expected E after expression in type") + } + st.advance(1) + ret = &Decltype{Expr: ret} + addSubst = true + + case 'p': + t := st.demangleType(isCast) + pack := st.findArgumentPack(t) + ret = &PackExpansion{Base: t, Pack: pack} + addSubst = true + + case 'a': + ret = &Name{Name: "auto"} + + case 'f': + ret = &BuiltinType{Name: "decimal32"} + case 'd': + ret = &BuiltinType{Name: "decimal64"} + case 'e': + ret = &BuiltinType{Name: "decimal128"} + case 'h': + ret = &BuiltinType{Name: "half"} + case 's': + ret = &BuiltinType{Name: "char16_t"} + case 'i': + ret = &BuiltinType{Name: "char32_t"} + case 'n': + ret = &BuiltinType{Name: "decltype(nullptr)"} + + case 'F': + accum := false + if len(st.str) > 0 && isDigit(st.str[0]) { + accum = true + // We don't care about the bits. + _ = st.number() + } + base := st.demangleType(isCast) + if len(st.str) > 0 && isDigit(st.str[0]) { + // We don't care about the bits. + st.number() + } + sat := false + if len(st.str) > 0 { + if st.str[0] == 's' { + sat = true + } + st.advance(1) + } + ret = &FixedType{Base: base, Accum: accum, Sat: sat} + + case 'v': + ret = st.vectorType(isCast) + addSubst = true + + default: + st.fail("unrecognized D code in type") + } + + default: + st.fail("unrecognized type code") + } + + if addSubst { + if sub != nil { + st.subs.add(sub) + } else { + st.subs.add(ret) + } + } + + if len(q) > 0 { + if _, ok := ret.(*FunctionType); ok { + ret = &MethodWithQualifiers{Method: ret, Qualifiers: q, RefQualifier: ""} + } else if mwq, ok := ret.(*MethodWithQualifiers); ok { + // Merge adjacent qualifiers. This case + // happens with a function with a trailing + // ref-qualifier. + mwq.Qualifiers = mergeQualifiers(q, mwq.Qualifiers) + } else { + // Merge adjacent qualifiers. This case + // happens with multi-dimensional array types. + if qsub, ok := ret.(*TypeWithQualifiers); ok { + q = mergeQualifiers(q, qsub.Qualifiers) + ret = qsub.Base + } + ret = &TypeWithQualifiers{Base: ret, Qualifiers: q} + } + st.subs.add(ret) + } + + return ret +} + +// mergeQualifiers merges two qualifer lists into one. +func mergeQualifiers(q1, q2 Qualifiers) Qualifiers { + m := make(map[string]bool) + for _, qual := range q1 { + m[qual] = true + } + for _, qual := range q2 { + if !m[qual] { + q1 = append(q1, qual) + m[qual] = true + } + } + return q1 +} + +// qualifiers maps from the character used in the mangled name to the +// string to print. 
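From the caller's side, the qualifier handling above means const and volatile bind to the type they follow, and the O, R and P codes become &&, & and *. Illustrative sketch (hypothetical helper; spellings such as "char const*" follow the standard demangler's order and should be verified against c++filt):

package demangle

import "fmt"

// qualifierSketch shows how cv-qualifiers and reference/pointer codes
// come out of the type demangler.
func qualifierSketch() {
	fmt.Println(Filter("_Z1fPKc")) // P K c: pointer to const char, expected "f(char const*)"
	fmt.Println(Filter("_Z1fRKi")) // R K i: reference to const int, expected "f(int const&)"
	fmt.Println(Filter("_Z1fOi"))  // O i: rvalue reference to int, expected "f(int&&)"
}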
+var qualifiers = map[byte]string{ + 'r': "restrict", + 'V': "volatile", + 'K': "const", +} + +// ::= [r] [V] [K] +func (st *state) cvQualifiers() Qualifiers { + var q Qualifiers + for len(st.str) > 0 { + if qv, ok := qualifiers[st.str[0]]; ok { + q = append([]string{qv}, q...) + st.advance(1) + } else if len(st.str) > 1 && st.str[:2] == "Dx" { + q = append([]string{"transaction_safe"}, q...) + st.advance(2) + } else { + break + } + } + return q +} + +// ::= R +// ::= O +func (st *state) refQualifier() string { + if len(st.str) > 0 { + switch st.str[0] { + case 'R': + st.advance(1) + return "&" + case 'O': + st.advance(1) + return "&&" + } + } + return "" +} + +// + +func (st *state) parmlist() []AST { + var ret []AST + for { + if len(st.str) < 1 { + break + } + if st.str[0] == 'E' || st.str[0] == '.' { + break + } + if (st.str[0] == 'R' || st.str[0] == 'O') && len(st.str) > 1 && st.str[1] == 'E' { + // This is a function ref-qualifier. + break + } + ptype := st.demangleType(false) + ret = append(ret, ptype) + } + + // There should always be at least one type. A function that + // takes no arguments will have a single parameter type + // "void". + if len(ret) == 0 { + st.fail("expected at least one type in type list") + } + + // Omit a single parameter type void. + if len(ret) == 1 { + if bt, ok := ret[0].(*BuiltinType); ok && bt.Name == "void" { + ret = nil + } + } + + return ret +} + +// ::= F [Y] [] E +func (st *state) functionType() AST { + st.checkChar('F') + if len(st.str) > 0 && st.str[0] == 'Y' { + // Function has C linkage. We don't print this. + st.advance(1) + } + ret := st.bareFunctionType(true) + r := st.refQualifier() + if r != "" { + ret = &MethodWithQualifiers{Method: ret, Qualifiers: nil, RefQualifier: r} + } + if len(st.str) == 0 || st.str[0] != 'E' { + st.fail("expected E after function type") + } + st.advance(1) + return ret +} + +// ::= [J]+ +func (st *state) bareFunctionType(hasReturnType bool) AST { + if len(st.str) > 0 && st.str[0] == 'J' { + hasReturnType = true + st.advance(1) + } + var returnType AST + if hasReturnType { + returnType = st.demangleType(false) + } + types := st.parmlist() + return &FunctionType{Return: returnType, Args: types} +} + +// ::= A <(positive dimension) number> _ <(element) type> +// ::= A [<(dimension) expression>] _ <(element) type> +func (st *state) arrayType(isCast bool) AST { + st.checkChar('A') + + if len(st.str) == 0 { + st.fail("missing array dimension") + } + + var dim AST + if st.str[0] == '_' { + dim = &Name{Name: ""} + } else if isDigit(st.str[0]) { + i := 1 + for len(st.str) > i && isDigit(st.str[i]) { + i++ + } + dim = &Name{Name: st.str[:i]} + st.advance(i) + } else { + dim = st.expression() + } + + if len(st.str) == 0 || st.str[0] != '_' { + st.fail("expected _ after dimension") + } + st.advance(1) + + t := st.demangleType(isCast) + + arr := &ArrayType{Dimension: dim, Element: t} + + // Qualifiers on the element of an array type go on the whole + // array type. 
+ if q, ok := arr.Element.(*TypeWithQualifiers); ok { + return &TypeWithQualifiers{Base: &ArrayType{Dimension: dim, Element: q.Base}, Qualifiers: q.Qualifiers} + } + + return arr +} + +// ::= Dv _ +// ::= Dv _ _ +func (st *state) vectorType(isCast bool) AST { + if len(st.str) == 0 { + st.fail("expected vector dimension") + } + + var dim AST + if st.str[0] == '_' { + st.advance(1) + dim = st.expression() + } else { + num := st.number() + dim = &Name{Name: fmt.Sprintf("%d", num)} + } + + if len(st.str) == 0 || st.str[0] != '_' { + st.fail("expected _ after vector dimension") + } + st.advance(1) + + t := st.demangleType(isCast) + + return &VectorType{Dimension: dim, Base: t} +} + +// ::= M <(class) type> <(member) type> +func (st *state) pointerToMemberType(isCast bool) AST { + st.checkChar('M') + cl := st.demangleType(false) + + // The ABI says, "The type of a non-static member function is + // considered to be different, for the purposes of + // substitution, from the type of a namespace-scope or static + // member function whose type appears similar. The types of + // two non-static member functions are considered to be + // different, for the purposes of substitution, if the + // functions are members of different classes. In other words, + // for the purposes of substitution, the class of which the + // function is a member is considered part of the type of + // function." + // + // For a pointer to member function, this call to demangleType + // will end up adding a (possibly qualified) non-member + // function type to the substitution table, which is not + // correct; however, the member function type will never be + // used in a substitution, so putting the wrong type in the + // substitution table is harmless. + mem := st.demangleType(isCast) + return &PtrMem{Class: cl, Member: mem} +} + +// _ */ +func (st *state) compactNumber() int { + if len(st.str) == 0 { + st.fail("missing index") + } + if st.str[0] == '_' { + st.advance(1) + return 0 + } else if st.str[0] == 'n' { + st.fail("unexpected negative number") + } + n := st.number() + if len(st.str) == 0 || st.str[0] != '_' { + st.fail("missing underscore after number") + } + st.advance(1) + return n + 1 +} + +// ::= T_ +// ::= T <(parameter-2 non-negative) number> _ +// +// When a template parameter is a substitution candidate, any +// reference to that substitution refers to the template parameter +// with the same index in the currently active template, not to +// whatever the template parameter would be expanded to here. We sort +// this out in substitution and simplify. +func (st *state) templateParam() AST { + if len(st.templates) == 0 { + st.fail("template parameter not in scope of template") + } + off := st.off + + st.checkChar('T') + n := st.compactNumber() + + template := st.templates[len(st.templates)-1] + + if template == nil { + // We are parsing a cast operator. If the cast is + // itself a template, then this is a forward + // reference. Fill it in later. + return &TemplateParam{Index: n, Template: nil} + } + + if n >= len(template.Args) { + st.failEarlier(fmt.Sprintf("template index out of range (%d >= %d)", n, len(template.Args)), st.off-off) + } + + return &TemplateParam{Index: n, Template: template} +} + +// setTemplate sets the Template field of any TemplateParam's in a. +// This handles the forward referencing template parameters found in +// cast operators. 
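compactNumber implements the underscore-terminated indices used by template parameters, closures and unnamed types: a bare underscore is index 0, and a number N followed by an underscore is index N+1, so T_ names the first template argument and T0_ the second. Illustrative sketch (hypothetical helper; expected outputs should match c++filt):

package demangle

import "fmt"

// templateParamSketch shows T_ and T0_ resolving against the enclosing
// template's argument list.
func templateParamSketch() {
	fmt.Println(Filter("_Z3fooIiEvT_"))   // T_  is argument 0, expected "void foo<int>(int)"
	fmt.Println(Filter("_Z3fooIicEvT0_")) // T0_ is argument 1, expected "void foo<int, char>(char)"
}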
+func (st *state) setTemplate(a AST, tmpl *Template) { + a.Traverse(func(a AST) bool { + switch a := a.(type) { + case *TemplateParam: + if a.Template != nil { + panic("internal error") + } + if tmpl == nil { + st.fail("cast template parameter not in scope of template") + } + if a.Index >= len(tmpl.Args) { + st.fail(fmt.Sprintf("cast template index out of range (%d >= %d)", a.Index, len(tmpl.Args))) + } + a.Template = tmpl + return false + default: + return true + } + }) +} + +// ::= I + E +func (st *state) templateArgs() []AST { + if len(st.str) == 0 || (st.str[0] != 'I' && st.str[0] != 'J') { + panic("internal error") + } + st.advance(1) + + var ret []AST + for len(st.str) == 0 || st.str[0] != 'E' { + arg := st.templateArg() + ret = append(ret, arg) + } + st.advance(1) + return ret +} + +// ::= +// ::= X E +// ::= +func (st *state) templateArg() AST { + if len(st.str) == 0 { + st.fail("missing template argument") + } + switch st.str[0] { + case 'X': + st.advance(1) + expr := st.expression() + if len(st.str) == 0 || st.str[0] != 'E' { + st.fail("missing end of expression") + } + st.advance(1) + return expr + + case 'L': + return st.exprPrimary() + + case 'I', 'J': + args := st.templateArgs() + return &ArgumentPack{Args: args} + + default: + return st.demangleType(false) + } +} + +// exprList parses a sequence of expressions up to a terminating character. +func (st *state) exprList(stop byte) AST { + if len(st.str) > 0 && st.str[0] == stop { + st.advance(1) + return &ExprList{Exprs: nil} + } + + var exprs []AST + for { + e := st.expression() + exprs = append(exprs, e) + if len(st.str) > 0 && st.str[0] == stop { + st.advance(1) + break + } + } + return &ExprList{Exprs: exprs} +} + +// ::= <(unary) operator-name> +// ::= <(binary) operator-name> +// ::= <(trinary) operator-name> +// ::= cl + E +// ::= st +// ::= +// ::= sr +// ::= sr +// ::= +func (st *state) expression() AST { + if len(st.str) == 0 { + st.fail("expected expression") + } + if st.str[0] == 'L' { + return st.exprPrimary() + } else if st.str[0] == 'T' { + return st.templateParam() + } else if st.str[0] == 's' && len(st.str) > 1 && st.str[1] == 'r' { + st.advance(2) + if len(st.str) == 0 { + st.fail("expected unresolved type") + } + switch st.str[0] { + case 'T', 'D', 'S': + t := st.demangleType(false) + n := st.unqualifiedName() + n = &Qualified{Scope: t, Name: n, LocalName: false} + if len(st.str) > 0 && st.str[0] == 'I' { + args := st.templateArgs() + n = &Template{Name: n, Args: args} + } + return n + default: + var s AST + if st.str[0] == 'N' { + st.advance(1) + s = st.demangleType(false) + } + for len(st.str) == 0 || st.str[0] != 'E' { + // GCC does not seem to follow the ABI here. + // It can emit type/name without an 'E'. + if s != nil && len(st.str) > 0 && !isDigit(st.str[0]) { + if q, ok := s.(*Qualified); ok { + a := q.Scope + if t, ok := a.(*Template); ok { + st.subs.add(t.Name) + st.subs.add(t) + } else { + st.subs.add(a) + } + return s + } + } + n := st.sourceName() + if len(st.str) > 0 && st.str[0] == 'I' { + args := st.templateArgs() + n = &Template{Name: n, Args: args} + } + if s == nil { + s = n + } else { + s = &Qualified{Scope: s, Name: n, LocalName: false} + } + } + if s == nil { + st.fail("missing scope in unresolved name") + } + st.advance(1) + // TODO(iant): Handle "on" and "dn". 
+ n := st.sourceName() + if len(st.str) > 0 && st.str[0] == 'I' { + args := st.templateArgs() + n = &Template{Name: n, Args: args} + } + return &Qualified{Scope: s, Name: n, LocalName: false} + } + } else if st.str[0] == 's' && len(st.str) > 1 && st.str[1] == 'p' { + st.advance(2) + e := st.expression() + pack := st.findArgumentPack(e) + return &PackExpansion{Base: e, Pack: pack} + } else if st.str[0] == 'f' && len(st.str) > 1 && st.str[1] == 'p' { + st.advance(2) + if len(st.str) > 0 && st.str[0] == 'T' { + st.advance(1) + return &FunctionParam{Index: 0} + } else { + index := st.compactNumber() + return &FunctionParam{Index: index + 1} + } + } else if isDigit(st.str[0]) || (st.str[0] == 'o' && len(st.str) > 1 && st.str[1] == 'n') { + if st.str[0] == 'o' { + // Skip operator function ID. + st.advance(2) + } + n := st.unqualifiedName() + if len(st.str) > 0 && st.str[0] == 'I' { + args := st.templateArgs() + n = &Template{Name: n, Args: args} + } + return n + } else if (st.str[0] == 'i' || st.str[0] == 't') && len(st.str) > 1 && st.str[1] == 'l' { + // Brace-enclosed initializer list. + c := st.str[0] + st.advance(2) + var t AST + if c == 't' { + t = st.demangleType(false) + } + exprs := st.exprList('E') + return &InitializerList{Type: t, Exprs: exprs} + } else if st.str[0] == 's' && len(st.str) > 1 && st.str[1] == 't' { + o, _ := st.operatorName(true) + t := st.demangleType(false) + return &Unary{Op: o, Expr: t, Suffix: false, SizeofType: true} + } else { + if len(st.str) < 2 { + st.fail("missing operator code") + } + code := st.str[:2] + o, args := st.operatorName(true) + switch args { + case 0: + return &Nullary{Op: o} + + case 1: + suffix := false + if code == "pp" || code == "mm" { + if len(st.str) > 0 && st.str[0] == '_' { + st.advance(1) + } else { + suffix = true + } + } + var operand AST + if _, ok := o.(*Cast); ok && len(st.str) > 0 && st.str[0] == '_' { + st.advance(1) + operand = st.exprList('E') + } else { + operand = st.expression() + } + return &Unary{Op: o, Expr: operand, Suffix: suffix, SizeofType: false} + + case 2: + var left, right AST + if code == "sc" || code == "dc" || code == "cc" || code == "rc" { + left = st.demangleType(false) + } else { + left = st.expression() + } + if code == "cl" { + right = st.exprList('E') + } else if code == "dt" || code == "pt" { + right = st.unqualifiedName() + if len(st.str) > 0 && st.str[0] == 'I' { + args := st.templateArgs() + right = &Template{Name: right, Args: args} + } + } else { + right = st.expression() + } + return &Binary{Op: o, Left: left, Right: right} + + case 3: + if code[0] == 'n' { + if code[1] != 'w' && code[1] != 'a' { + panic("internal error") + } + place := st.exprList('_') + if place.(*ExprList).Exprs == nil { + place = nil + } + t := st.demangleType(false) + var ini AST + if len(st.str) > 0 && st.str[0] == 'E' { + st.advance(1) + } else if len(st.str) > 1 && st.str[0] == 'p' && st.str[1] == 'i' { + // Parenthesized initializer. + st.advance(2) + ini = st.exprList('E') + } else if len(st.str) > 1 && st.str[0] == 'i' && st.str[1] == 'l' { + // Initializer list. 
+ ini = st.expression() + } else { + st.fail("unrecognized new initializer") + } + return &New{Op: o, Place: place, Type: t, Init: ini} + } else { + first := st.expression() + second := st.expression() + third := st.expression() + return &Trinary{Op: o, First: first, Second: second, Third: third} + } + + default: + st.fail(fmt.Sprintf("unsupported number of operator arguments: %d", args)) + panic("not reached") + } + } +} + +// ::= L <(value) number> E +// ::= L <(value) float> E +// ::= L E +func (st *state) exprPrimary() AST { + st.checkChar('L') + if len(st.str) == 0 { + st.fail("expected primary expression") + + } + + // Check for 'Z' here because g++ incorrectly omitted the + // underscore until -fabi-version=3. + var ret AST + if st.str[0] == '_' || st.str[0] == 'Z' { + if st.str[0] == '_' { + st.advance(1) + } + if len(st.str) == 0 || st.str[0] != 'Z' { + st.fail("expected mangled name") + } + st.advance(1) + ret = st.encoding(true) + } else { + t := st.demangleType(false) + + neg := false + if len(st.str) > 0 && st.str[0] == 'n' { + neg = true + st.advance(1) + } + i := 0 + for len(st.str) > 0 && st.str[i] != 'E' { + i++ + } + val := st.str[:i] + st.advance(i) + ret = &Literal{Type: t, Val: val, Neg: neg} + } + if len(st.str) == 0 || st.str[0] != 'E' { + st.fail("expected E after literal") + } + st.advance(1) + return ret +} + +// ::= _ <(non-negative) number> +func (st *state) discriminator(a AST) AST { + if len(st.str) == 0 || st.str[0] != '_' { + return a + } + off := st.off + st.advance(1) + d := st.number() + if d < 0 { + st.failEarlier("invalid negative discriminator", st.off-off) + } + // We don't currently print out the discriminator, so we don't + // save it. + return a +} + +// ::= Ul E [ ] _ +func (st *state) closureTypeName() AST { + st.checkChar('U') + st.checkChar('l') + types := st.parmlist() + if len(st.str) == 0 || st.str[0] != 'E' { + st.fail("expected E after closure type name") + } + st.advance(1) + num := st.compactNumber() + ret := &Closure{Types: types, Num: num} + st.subs.add(ret) + return ret +} + +// ::= Ut [ ] _ +func (st *state) unnamedTypeName() AST { + st.checkChar('U') + st.checkChar('t') + num := st.compactNumber() + ret := &UnnamedType{Num: num} + st.subs.add(ret) + return ret +} + +// Recognize a clone suffix. These are not part of the mangling API, +// but are added by GCC when cloning functions. +func (st *state) cloneSuffix(a AST) AST { + i := 0 + if len(st.str) > 1 && st.str[0] == '.' && (isLower(st.str[1]) || st.str[1] == '_') { + i += 2 + for len(st.str) > i && (isLower(st.str[i]) || st.str[i] == '_') { + i++ + } + } + for len(st.str) > i+1 && st.str[i] == '.' && isDigit(st.str[i+1]) { + i += 2 + for len(st.str) > i && isDigit(st.str[i]) { + i++ + } + } + suffix := st.str[:i] + st.advance(i) + return &Clone{Base: a, Suffix: suffix} +} + +// substitutions is the list of substitution candidates that may +// appear later in the string. +type substitutions []AST + +// add adds a new substitution candidate. +func (subs *substitutions) add(a AST) { + *subs = append(*subs, a) +} + +// subAST maps standard substitution codes to the corresponding AST. 
+var subAST = map[byte]AST{ + 't': &Name{Name: "std"}, + 'a': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "allocator"}}, + 'b': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "basic_string"}}, + 's': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "string"}}, + 'i': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "istream"}}, + 'o': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "ostream"}}, + 'd': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "iostream"}}, +} + +// verboseAST maps standard substitution codes to the long form of the +// corresponding AST. We use this when the Verbose option is used, to +// match the standard demangler. +var verboseAST = map[byte]AST{ + 't': &Name{Name: "std"}, + 'a': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "allocator"}}, + 'b': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "basic_string"}}, + + // std::basic_string, std::allocator > + 's': &Template{ + Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "basic_string"}}, + Args: []AST{ + &BuiltinType{Name: "char"}, + &Template{ + Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "char_traits"}}, + Args: []AST{&BuiltinType{Name: "char"}}}, + &Template{ + Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "allocator"}}, + Args: []AST{&BuiltinType{Name: "char"}}}}}, + // std::basic_istream > + 'i': &Template{ + Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "basic_istream"}}, + Args: []AST{ + &BuiltinType{Name: "char"}, + &Template{ + Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "char_traits"}}, + Args: []AST{&BuiltinType{Name: "char"}}}}}, + // std::basic_ostream > + 'o': &Template{ + Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "basic_ostream"}}, + Args: []AST{ + &BuiltinType{Name: "char"}, + &Template{ + Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "char_traits"}}, + Args: []AST{&BuiltinType{Name: "char"}}}}}, + // std::basic_iostream > + 'd': &Template{ + Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "basic_iostream"}}, + Args: []AST{ + &BuiltinType{Name: "char"}, + &Template{ + Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "char_traits"}}, + Args: []AST{&BuiltinType{Name: "char"}}}}}, +} + +// ::= S _ +// ::= S_ +// ::= St +// ::= Sa +// ::= Sb +// ::= Ss +// ::= Si +// ::= So +// ::= Sd +func (st *state) substitution(forPrefix bool) AST { + st.checkChar('S') + if len(st.str) == 0 { + st.fail("missing substitution index") + } + c := st.str[0] + st.advance(1) + dec := 1 + if c == '_' || isDigit(c) || isUpper(c) { + id := 0 + if c != '_' { + for c != '_' { + // Don't overflow a 32-bit int. + if id >= 0x80000000/36-36 { + st.fail("substitution index overflow") + } + if isDigit(c) { + id = id*36 + int(c-'0') + } else if isUpper(c) { + id = id*36 + int(c-'A') + 10 + } else { + st.fail("invalid character in substitution index") + } + + if len(st.str) == 0 { + st.fail("missing end to substitution index") + } + c = st.str[0] + st.advance(1) + dec++ + } + id++ + } + + if id >= len(st.subs) { + st.failEarlier(fmt.Sprintf("substitution index out of range (%d >= %d)", id, len(st.subs)), dec) + } + + ret := st.subs[id] + + // We need to update any references to template + // parameters to refer to the currently active + // template. 
+	copy := func(a AST) AST {
+		tp, ok := a.(*TemplateParam)
+		if !ok {
+			return nil
+		}
+		if len(st.templates) == 0 {
+			st.failEarlier("substituted template parameter not in scope of template", dec)
+		}
+		template := st.templates[len(st.templates)-1]
+		if template == nil {
+			// This template parameter is within
+			// the scope of a cast operator.
+			return &TemplateParam{Index: tp.Index, Template: nil}
+		}
+
+		if tp.Index >= len(template.Args) {
+			st.failEarlier(fmt.Sprintf("substituted template index out of range (%d >= %d)", tp.Index, len(template.Args)), dec)
+		}
+
+		return &TemplateParam{Index: tp.Index, Template: template}
+	}
+	skip := func(a AST) bool {
+		if _, ok := a.(*Typed); ok {
+			return true
+		}
+		return false
+	}
+	if c := ret.Copy(copy, skip); c != nil {
+		return c
+	}
+
+	return ret
+	} else {
+	m := subAST
+	if st.verbose {
+		m = verboseAST
+	}
+	// For compatibility with the standard demangler, use
+	// a longer name for a constructor or destructor.
+	if forPrefix && len(st.str) > 0 && (st.str[0] == 'C' || st.str[0] == 'D') {
+		m = verboseAST
+	}
+	a, ok := m[c]
+	if !ok {
+		st.failEarlier("unrecognized substitution code", 1)
+	}
+
+	if len(st.str) > 0 && st.str[0] == 'B' {
+		a = st.taggedName(a)
+	}
+
+	return a
+	}
+}
+
+// isDigit returns whether c is a digit for demangling purposes.
+func isDigit(c byte) bool {
+	return c >= '0' && c <= '9'
+}
+
+// isUpper returns whether c is an upper case letter for demangling purposes.
+func isUpper(c byte) bool {
+	return c >= 'A' && c <= 'Z'
+}
+
+// isLower returns whether c is a lower case letter for demangling purposes.
+func isLower(c byte) bool {
+	return c >= 'a' && c <= 'z'
+}
+
+// simplify replaces template parameters with their expansions, and
+// merges qualifiers.
+func simplify(a AST) AST {
+	skip := func(a AST) bool {
+		return false
+	}
+	if r := a.Copy(simplifyOne, skip); r != nil {
+		return r
+	}
+	return a
+}
+
+// simplifyOne simplifies a single AST. It returns nil if there is
+// nothing to do.
+func simplifyOne(a AST) AST { + switch a := a.(type) { + case *TemplateParam: + if a.Template == nil || a.Index >= len(a.Template.Args) { + panic("internal error") + } + return a.Template.Args[a.Index] + case *MethodWithQualifiers: + if m, ok := a.Method.(*MethodWithQualifiers); ok { + ref := a.RefQualifier + if ref == "" { + ref = m.RefQualifier + } else if m.RefQualifier != "" { + if ref == "&" || m.RefQualifier == "&" { + ref = "&" + } + } + return &MethodWithQualifiers{Method: m.Method, Qualifiers: mergeQualifiers(a.Qualifiers, m.Qualifiers), RefQualifier: ref} + } + if t, ok := a.Method.(*TypeWithQualifiers); ok { + return &MethodWithQualifiers{Method: t.Base, Qualifiers: mergeQualifiers(a.Qualifiers, t.Qualifiers), RefQualifier: a.RefQualifier} + } + case *TypeWithQualifiers: + if ft, ok := a.Base.(*FunctionType); ok { + return &MethodWithQualifiers{Method: ft, Qualifiers: a.Qualifiers, RefQualifier: ""} + } + if t, ok := a.Base.(*TypeWithQualifiers); ok { + return &TypeWithQualifiers{Base: t.Base, Qualifiers: mergeQualifiers(a.Qualifiers, t.Qualifiers)} + } + if m, ok := a.Base.(*MethodWithQualifiers); ok { + return &MethodWithQualifiers{Method: m.Method, Qualifiers: mergeQualifiers(a.Qualifiers, m.Qualifiers), RefQualifier: m.RefQualifier} + } + case *ReferenceType: + if rt, ok := a.Base.(*ReferenceType); ok { + return rt + } + if rrt, ok := a.Base.(*RvalueReferenceType); ok { + return &ReferenceType{Base: rrt.Base} + } + case *RvalueReferenceType: + if rrt, ok := a.Base.(*RvalueReferenceType); ok { + return rrt + } + if rt, ok := a.Base.(*ReferenceType); ok { + return rt + } + case *ArrayType: + // Qualifiers on the element of an array type + // go on the whole array type. + if q, ok := a.Element.(*TypeWithQualifiers); ok { + return &TypeWithQualifiers{ + Base: &ArrayType{Dimension: a.Dimension, Element: q.Base}, + Qualifiers: q.Qualifiers, + } + } + case *PackExpansion: + // Expand the pack and replace it with a list of + // expressions. + if a.Pack != nil { + exprs := make([]AST, len(a.Pack.Args)) + for i, arg := range a.Pack.Args { + copy := func(sub AST) AST { + // Replace the ArgumentPack + // with a specific argument. + if sub == a.Pack { + return arg + } + // Copy everything else. + return nil + } + + skip := func(sub AST) bool { + // Don't traverse into another + // pack expansion. + _, skip := sub.(*PackExpansion) + return skip + } + + exprs[i] = simplify(a.Base.Copy(copy, skip)) + } + return &ExprList{Exprs: exprs} + } + } + return nil +} + +// findArgumentPack walks the AST looking for the argument pack for a +// pack expansion. We find it via a template parameter. +func (st *state) findArgumentPack(a AST) *ArgumentPack { + var ret *ArgumentPack + a.Traverse(func(a AST) bool { + if ret != nil { + return false + } + switch a := a.(type) { + case *TemplateParam: + if a.Template == nil || a.Index >= len(a.Template.Args) { + return true + } + if pack, ok := a.Template.Args[a.Index].(*ArgumentPack); ok { + ret = pack + return false + } + case *PackExpansion, *Closure, *Name: + return false + case *TaggedName, *Operator, *BuiltinType, *FunctionParam: + return false + case *UnnamedType, *FixedType, *DefaultArg: + return false + } + return true + }) + return ret +} diff --git a/third_party/src/golang/demangle/demangle_test.go b/third_party/src/golang/demangle/demangle_test.go new file mode 100644 index 00000000..b981ca6c --- /dev/null +++ b/third_party/src/golang/demangle/demangle_test.go @@ -0,0 +1,292 @@ +// Copyright 2015 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package demangle + +import ( + "strconv" + "strings" + "testing" +) + +// Check test cases discovered after the code passed the tests in +// demangle-expected (which are tested by TestExpected). Some of this +// are cases where we differ from the standard demangler, some we are +// the same but we weren't initially. +func TestDemangler(t *testing.T) { + var tests = []struct { + input string + want string + wantNoParams string + wantNoTemplateParams string + wantMinimal string + }{ + { + "_ZNSaIcEC1ERKS_", + "std::allocator::allocator(std::allocator const&)", + "std::allocator::allocator", + "std::allocator::allocator(std::allocator const&)", + "std::allocator::allocator", + }, + { + "_ZN9__gnu_cxx13stdio_filebufIcSt11char_traitsIcEEC1EP8_IO_FILESt13_Ios_Openmodem", + "__gnu_cxx::stdio_filebuf >::stdio_filebuf(_IO_FILE*, std::_Ios_Openmode, unsigned long)", + "__gnu_cxx::stdio_filebuf >::stdio_filebuf", + "__gnu_cxx::stdio_filebuf::stdio_filebuf(_IO_FILE*, std::_Ios_Openmode, unsigned long)", + "__gnu_cxx::stdio_filebuf::stdio_filebuf", + }, + { + "_ZN1n1CcvNS_1DIT_EEI1EEEv", + "n::C::operator n::D()", + "n::C::operator n::D", + "n::C::operator n::D()", + "n::C::operator n::D", + }, + { + "_Z1CIvPN1D1E1FIdJEEEdEPN1GILb0ET_T0_T1_E1HEPFS6_S7_S8_EN1H1I1JIS7_E1KENSG_IS8_E1KE", + "G*, double>::H* C*, double>(void (*)(D::E::F*, double), H::I::J*>::K, H::I::J::K)", + "C*, double>", + "G::H* C(void (*)(D::E::F*, double), H::I::J::K, H::I::J::K)", + "C", + }, + { + "_ZZNK1CI1DIcSt1EIcESaIcEEJEE1FEvE1F", + "C, std::allocator > >::F() const::F", + "C, std::allocator > >::F() const::F", + "C::F() const::F", + "C::F() const::F", + }, + { + "_ZN1CI1DSt1EIK1FN1G1HEEE1I1JIJRKS6_EEEvDpOT_", + "void C >::I::J const&>(std::E const&)", + "C >::I::J const&>", + "void C::I::J(std::E const&)", + "C::I::J", + }, + { + "_ZN1C1D1E1FIJEEEvi1GDpT_", + "void C::D::E::F<>(int, G)", + "C::D::E::F<>", + "void C::D::E::F(int, G)", + "C::D::E::F", + }, + { + "_ZN1CILj50ELb1EE1DEv", + "C<50u, true>::D()", + "C<50u, true>::D", + "C::D()", + "C::D", + }, + { + "_ZN1CUt_C2Ev", + "C::{unnamed type#1}::{unnamed type#1}()", + "C::{unnamed type#1}::{unnamed type#1}", + "C::{unnamed type#1}::{unnamed type#1}()", + "C::{unnamed type#1}::{unnamed type#1}", + }, + { + "_ZN1C12_GLOBAL__N_11DINS_1EEEEN1F1GIDTadcldtcvT__E1HEEEERKS5_NS_1I1JE", + "F::G C::(anonymous namespace)::D(C::E const&, C::I::J)", + "C::(anonymous namespace)::D", + "F::G C::(anonymous namespace)::D(C::E const&, C::I::J)", + "C::(anonymous namespace)::D", + }, + { + "_ZN1CI1DE1EIJiRiRPKcRA1_S4_S8_bS6_S3_RjRPKN1F1GERPKN1H1IEEEEvDpOT_", + "void C::E(int&&, int&, char const*&, char const (&) [1], char const (&) [1], bool&&, char const*&, int&, unsigned int&, F::G const*&, H::I const*&)", + "C::E", + "void C::E(int&&, int&, char const*&, char const (&) [1], char const (&) [1], bool&&, char const*&, int&, unsigned int&, F::G const*&, H::I const*&)", + "C::E", + }, + { + "_ZN1C12_GLOBAL__N_11DIFbPKNS_1EEEEEvPNS_1FERKT_", + "void C::(anonymous namespace)::D(C::F*, bool (&)(C::E const*) const)", + "C::(anonymous namespace)::D", + "void C::(anonymous namespace)::D(C::F*, bool (&)(C::E const*) const)", + "C::(anonymous namespace)::D", + }, + { + "_ZN1C1D1EIJRFviSt1FIFvRKN1G1H1IEEERKSt6vectorINS_1JESaISB_EEERiS9_EvEENS0_1K1LIJDpNSt1MIT_E1NEEEEDpOSM_", + "C::D::K::L, std::vector > const&)>::N, std::M::N, std::M >::N> C::D::E, std::vector > const&), int&, std::F, void>(void 
(&)(int, std::F, std::vector > const&), int&, std::F&&)", + "C::D::E, std::vector > const&), int&, std::F, void>", + "C::D::K::L C::D::E(void (&)(int, std::F, std::vector const&), int&, std::F&&)", + "C::D::E", + }, + { + "_ZN1C1D1E1FcvNS_1GIT_EEI1HEEv", + "C::D::E::F::operator C::G()", + "C::D::E::F::operator C::G", + "C::D::E::F::operator C::G()", + "C::D::E::F::operator C::G", + }, + { + "_ZN9__gnu_cxx17__normal_iteratorIPK1EIN1F1G1HEESt6vectorIS5_SaIS5_EEEC2IPS5_EERKNS0_IT_NS_11__enable_ifIXsr3std10__are_sameISE_SD_EE7__valueESA_E1IEEE", + "__gnu_cxx::__normal_iterator const*, std::vector, std::allocator > > >::__normal_iterator*>(__gnu_cxx::__normal_iterator*, __gnu_cxx::__enable_if*, E*>::__value, std::vector, std::allocator > > >::I> const&)", + "__gnu_cxx::__normal_iterator const*, std::vector, std::allocator > > >::__normal_iterator*>", + "__gnu_cxx::__normal_iterator::__normal_iterator(__gnu_cxx::__normal_iterator const&)", + "__gnu_cxx::__normal_iterator::__normal_iterator", + }, + { + "_ZNKSt1CIM1DKFjvEEclIJEvEEjPKS0_DpOT_", + "unsigned int std::C::operator()(D const*) const", + "std::C::operator()", + "unsigned int std::C::operator()(D const*) const", + "std::C::operator()", + }, + { + "_ZNSt10_HashtableI12basic_stringIcSt11char_traitsIcESaIcEESt4pairIKS4_N1C1D1EEESaISA_ENSt8__detail10_Select1stESt8equal_toIS4_ESt4hashIS4_ENSC_18_Mod_range_hashingENSC_20_Default_ranged_hashENSC_20_Prime_rehash_policyENSC_17_Hashtable_traitsILb1ELb0ELb1EEEE9_M_assignIZNSN_C1ERKSN_EUlPKNSC_10_Hash_nodeISA_Lb1EEEE_EEvSQ_RKT_", + "void std::_Hashtable, std::allocator >, std::pair, std::allocator > const, C::D::E>, std::allocator, std::allocator > const, C::D::E> >, std::__detail::_Select1st, std::equal_to, std::allocator > >, std::hash, std::allocator > >, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits >::_M_assign, std::allocator >, std::pair, std::allocator > const, C::D::E>, std::allocator, std::allocator > const, C::D::E> >, std::__detail::_Select1st, std::equal_to, std::allocator > >, std::hash, std::allocator > >, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits >::_Hashtable(std::_Hashtable, std::allocator >, std::pair, std::allocator > const, C::D::E>, std::allocator, std::allocator > const, C::D::E> >, std::__detail::_Select1st, std::equal_to, std::allocator > >, std::hash, std::allocator > >, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits > const&)::{lambda(std::__detail::_Hash_node, std::allocator > const, C::D::E>, true> const*)#1}>(std::_Hashtable, std::allocator >, std::pair, std::allocator > const, C::D::E>, std::allocator, std::allocator > const, C::D::E> >, std::__detail::_Select1st, std::equal_to, std::allocator > >, std::hash, std::allocator > >, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits > const&, std::_Hashtable, std::allocator >, std::pair, std::allocator > const, C::D::E>, std::allocator, std::allocator > const, C::D::E> >, std::__detail::_Select1st, std::equal_to, std::allocator > >, std::hash, std::allocator > >, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits >::_Hashtable(std::_Hashtable, std::allocator 
>, std::pair, std::allocator > const, C::D::E>, std::allocator, std::allocator > const, C::D::E> >, std::__detail::_Select1st, std::equal_to, std::allocator > >, std::hash, std::allocator > >, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits > const&)::{lambda(std::__detail::_Hash_node, std::allocator > const, C::D::E>, true> const*)#1} const&)", + "std::_Hashtable, std::allocator >, std::pair, std::allocator > const, C::D::E>, std::allocator, std::allocator > const, C::D::E> >, std::__detail::_Select1st, std::equal_to, std::allocator > >, std::hash, std::allocator > >, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits >::_M_assign, std::allocator >, std::pair, std::allocator > const, C::D::E>, std::allocator, std::allocator > const, C::D::E> >, std::__detail::_Select1st, std::equal_to, std::allocator > >, std::hash, std::allocator > >, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits >::_Hashtable(std::_Hashtable, std::allocator >, std::pair, std::allocator > const, C::D::E>, std::allocator, std::allocator > const, C::D::E> >, std::__detail::_Select1st, std::equal_to, std::allocator > >, std::hash, std::allocator > >, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits > const&)::{lambda(std::__detail::_Hash_node, std::allocator > const, C::D::E>, true> const*)#1}>", + "void std::_Hashtable::_M_assign(std::_Hashtable const&, std::_Hashtable::_Hashtable(std::_Hashtable const&)::{lambda(std::__detail::_Hash_node const*)#1} const&)", + "std::_Hashtable::_M_assign", + }, + { + "_ZSt3maxIVdERKT_S3_S3_", + "double const volatile& std::max(double const volatile&, double const volatile&)", + "std::max", + "double const volatile& std::max(double const volatile&, double const volatile&)", + "std::max", + }, + { + "_ZZN1C1D1E1F1G1HEvENUlvE_C2EOS4_", + "C::D::E::F::G::H()::{lambda()#1}::{lambda()#1}({lambda()#1}&&)", + "C::D::E::F::G::H()::{lambda()#1}::{lambda()#1}", + "C::D::E::F::G::H()::{lambda()#1}::{lambda()#1}({lambda()#1}&&)", + "C::D::E::F::G::H()::{lambda()#1}::{lambda()#1}", + }, + { + "_ZThn8_NK1C1D1EEv", + "non-virtual thunk to C::D::E() const", + "non-virtual thunk to C::D::E() const", + "non-virtual thunk to C::D::E() const", + "non-virtual thunk to C::D::E() const", + }, + { + "_ZTv0_n96_NK1C1D1E1FEv", + "virtual thunk to C::D::E::F() const", + "virtual thunk to C::D::E::F() const", + "virtual thunk to C::D::E::F() const", + "virtual thunk to C::D::E::F() const", + }, + { + "_ZTCSt9strstream16_So", + "construction vtable for std::ostream-in-std::strstream", + "construction vtable for std::ostream-in-std::strstream", + "construction vtable for std::ostream-in-std::strstream", + "construction vtable for std::ostream-in-std::strstream", + }, + { + "_ZGVZZN1C1D1EEvENK3$_0clEvE1F", + "guard variable for C::D::E()::$_0::operator()() const::F", + "guard variable for C::D::E()::$_0::operator()() const::F", + "guard variable for C::D::E()::$_0::operator()() const::F", + "guard variable for C::D::E()::$_0::operator()() const::F", + }, + { + "_Z1fICiEvT_", + "void f(int _Complex)", + "f", + "void f(int _Complex)", + "f", + }, + { + "_GLOBAL__D__Z2fnv", + "global destructors keyed to fn()", + "global destructors keyed to fn()", + 
"global destructors keyed to fn()", + "global destructors keyed to fn()", + }, + { + "_Z1fIXadL_Z1hvEEEvv", + "void f<&h>()", + "f<&h>", + "void f()", + "f", + }, + { + "_Z1CIP1DEiRK1EPT_N1F1GIS5_Xaasr1HIS5_E1IntsrSA_1JEE1KE", + "int C(E const&, D**, F::G::I&&(!H::J)>::K)", + "C", + "int C(E const&, D**, F::G::K)", + "C", + }, + } + + for _, test := range tests { + if got, err := ToString(test.input); err != nil { + t.Errorf("demangling %s: unexpected error %v", test.input, err) + } else if got != test.want { + t.Errorf("demangling %s: got %s, want %s", test.input, got, test.want) + } + + if got, err := ToString(test.input, NoParams); err != nil { + t.Errorf("demangling NoParams %s: unexpected error %v", test.input, err) + } else if got != test.wantNoParams { + t.Errorf("demangling NoParams %s: got %s, want %s", test.input, got, test.wantNoParams) + } + + if got, err := ToString(test.input, NoTemplateParams); err != nil { + t.Errorf("demangling NoTemplateParams %s: unexpected error %v", test.input, err) + } else if got != test.wantNoTemplateParams { + t.Errorf("demangling NoTemplateParams %s: got %s, want %s", test.input, got, test.wantNoTemplateParams) + } + + if got, err := ToString(test.input, NoParams, NoTemplateParams); err != nil { + t.Errorf("demangling NoTemplateParams %s: unexpected error %v", test.input, err) + } else if got != test.wantMinimal { + t.Errorf("demangling Minimal %s: got %s, want %s", test.input, got, test.wantMinimal) + } + + // Test Filter also. + if got := Filter(test.input); got != test.want { + t.Errorf("Filter(%s) == %s, want %s", test.input, got, test.want) + } + } +} + +// Test for some failure cases. +func TestFailure(t *testing.T) { + var tests = []struct { + input string + error string + off int + }{ + { + "_Z1FE", + "unparsed characters at end of mangled name", + 4, + }, + { + "_Z1FQ", + "unrecognized type code", + 4, + }, + } + + for _, test := range tests { + got, err := ToString(test.input) + if err == nil { + t.Errorf("unexpected success for %s: %s", test.input, got) + } else if !strings.Contains(err.Error(), test.error) { + t.Errorf("unexpected error for %s: %v", test.input, err) + } else { + s := err.Error() + i := strings.LastIndex(s, " at ") + if i < 0 { + t.Errorf("missing offset in error for %s: %v", test.input, err) + } else { + off, oerr := strconv.Atoi(s[i+4:]) + if oerr != nil { + t.Errorf("can't parse offset (%s) for %s: %v", s[i+4:], test.input, err) + } else if off != test.off { + t.Errorf("unexpected offset for %s: got %d, want %d", test.input, off, test.off) + } + } + } + + if got := Filter(test.input); got != test.input { + t.Errorf("Filter(%s) == %s, want %s", test.input, got, test.input) + } + } +} diff --git a/third_party/src/golang/demangle/expected_test.go b/third_party/src/golang/demangle/expected_test.go new file mode 100644 index 00000000..50f2069a --- /dev/null +++ b/third_party/src/golang/demangle/expected_test.go @@ -0,0 +1,182 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package demangle + +import ( + "bufio" + "flag" + "fmt" + "os" + "strings" + "testing" +) + +var verbose = flag.Bool("verbose", false, "print each demangle-expected symbol") + +const filename = "testdata/demangle-expected" + +// A list of exceptions from demangle-expected that we do not handle +// the same as the standard demangler. We keep a list of exceptions +// so that we can use an exact copy of the file. 
These exceptions are +// all based on different handling of a substitution that refers to a +// template parameter. The standard demangler seems to have a bug in +// which template it uses when a reference or rvalue-reference refers +// to a substitution that resolves to a template parameter. +var exceptions = map[string]bool{ + "_ZSt7forwardIRN1x14refobjiteratorINS0_3refINS0_4mime30multipart_section_processorObjIZ15get_body_parserIZZN14mime_processor21make_section_iteratorERKNS2_INS3_10sectionObjENS0_10ptrrefBaseEEEbENKUlvE_clEvEUlSB_bE_ZZNS6_21make_section_iteratorESB_bENKSC_clEvEUlSB_E0_ENS1_INS2_INS0_20outputrefiteratorObjIiEES8_EEEERKSsSB_OT_OT0_EUlmE_NS3_32make_multipart_default_discarderISP_EEEES8_EEEEEOT_RNSt16remove_referenceISW_E4typeE": true, + "_ZN3mdr16in_cached_threadIRZNK4cudr6GPUSet17parallel_for_eachIZN5tns3d20shape_representation7compute7GPUImpl7executeERKNS_1AINS_7ptr_refIKjEELl3ELl3ENS_8c_strideILl1ELl0EEEEERKNS8_INS9_IjEELl4ELl1ESD_EEEUliRKNS1_7ContextERNS7_5StateEE_JSt6vectorISO_SaISO_EEEEEvOT_DpRT0_EUlSP_E_JSt17reference_wrapperISO_EEEENS_12ScopedFutureIDTclfp_spcl7forwardISW_Efp0_EEEEESV_DpOSW_": true, + "_ZNSt9_Any_data9_M_accessIPZN3sel8Selector6SetObjI3FooJPKcMS4_FviEEEEvRT_DpT0_EUlvE_EESA_v": true, + "_ZNSt9_Any_data9_M_accessIPZN13ThreadManager7newTaskIRSt5_BindIFSt7_Mem_fnIM5DiaryFivEEPS5_EEIEEESt6futureINSt9result_ofIFT_DpT0_EE4typeEEOSF_DpOSG_EUlvE_EERSF_v": true, + "_ZNSt9_Any_data9_M_accessIPZN6cereal18polymorphic_detail15getInputBindingINS1_16JSONInputArchiveEEENS1_6detail15InputBindingMapIT_E11SerializersERS7_jEUlPvRSt10unique_ptrIvNS5_12EmptyDeleterIvEEEE0_EESA_v": true, + "_ZNSt9_Any_data9_M_accessIPZ4postISt8functionIFvvEEEvOT_EUlvE_EERS5_v": true, +} + +// For simplicity, this test reads an exact copy of +// libiberty/testsuite/demangle-expected from GCC. See that file for +// the syntax. We ignore all tests that are not --format=gnu-v3 or +// --format=auto with a string starting with _Z. +func TestExpected(t *testing.T) { + f, err := os.Open(filename) + if err != nil { + t.Fatal(err) + } + scanner := bufio.NewScanner(f) + lineno := 1 + for { + format, got := getOptLine(t, scanner, &lineno) + if !got { + break + } + report := lineno + input := getLine(t, scanner, &lineno) + expect := getLine(t, scanner, &lineno) + + testNoParams := false + skip := false + if len(format) > 0 && format[0] == '-' { + for _, arg := range strings.Fields(format) { + switch arg { + case "--format=gnu-v3": + case "--format=auto": + if !strings.HasPrefix(input, "_Z") { + skip = true + } + case "--no-params": + testNoParams = true + case "--ret-postfix", "--ret-drop": + skip = true + case "--is-v3-ctor", "--is-v3-dtor": + skip = true + default: + if !strings.HasPrefix(arg, "--format=") { + t.Errorf("%s:%d: unrecognized argument %s", filename, report, arg) + } + skip = true + } + } + } + + // The libiberty testsuite passes DMGL_TYPES to + // demangle type names, but that doesn't seem useful + // and we don't support it. + if !strings.HasPrefix(input, "_Z") && !strings.HasPrefix(input, "_GLOBAL_") { + skip = true + } + + var expectNoParams string + if testNoParams { + expectNoParams = getLine(t, scanner, &lineno) + } + + if skip { + continue + } + + oneTest(t, report, input, expect, true) + if testNoParams { + oneTest(t, report, input, expectNoParams, false) + } + } + if err := scanner.Err(); err != nil { + t.Error(err) + } +} + +// oneTest tests one entry from demangle-expected. 
+func oneTest(t *testing.T, report int, input, expect string, params bool) { + if *verbose { + fmt.Println(input) + } + + exception := exceptions[input] + + var s string + var err error + if params { + s, err = ToString(input) + } else { + s, err = ToString(input, NoParams) + } + if err != nil { + if exception { + t.Logf("%s:%d: ignore expected difference: got %q, expected %q", filename, report, err, expect) + return + } + + if err != ErrNotMangledName { + if input == expect { + return + } + t.Errorf("%s:%d: %v", filename, report, err) + return + } + s = input + } + + if s != expect { + if exception { + t.Logf("%s:%d: ignore expected difference: got %q, expected %q", filename, report, s, expect) + } else { + var a AST + if params { + a, err = ToAST(input) + } else { + a, err = ToAST(input, NoParams) + } + if err != nil { + t.Logf("ToAST error: %v", err) + } else { + t.Logf("\n%#v", a) + } + t.Errorf("%s:%d: params: %t: got %q, expected %q", filename, report, params, s, expect) + } + } else if exception && params { + t.Errorf("%s:%d: unexpected success (input listed in exceptions)", filename, report) + } +} + +// getLine reads a line from demangle-expected. +func getLine(t *testing.T, scanner *bufio.Scanner, lineno *int) string { + s, got := getOptLine(t, scanner, lineno) + if !got { + t.Fatalf("%s:%d: unexpected EOF", filename, *lineno) + } + return s +} + +// getOptLine reads an optional line from demangle-expected, returning +// false at EOF. It skips comment lines and updates *lineno. +func getOptLine(t *testing.T, scanner *bufio.Scanner, lineno *int) (string, bool) { + for { + if !scanner.Scan() { + return "", false + } + *lineno++ + line := scanner.Text() + if !strings.HasPrefix(line, "#") { + return line, true + } + } +} diff --git a/third_party/src/golang/demangle/testdata/demangle-expected b/third_party/src/golang/demangle/testdata/demangle-expected new file mode 100644 index 00000000..b0020bac --- /dev/null +++ b/third_party/src/golang/demangle/testdata/demangle-expected @@ -0,0 +1,4400 @@ +# This file holds test cases for the demangler. +# Each test case looks like this: +# options +# input to be demangled +# expected output +# +# Supported options: +# --format= Sets the demangling style. +# --no-params There are two lines of expected output; the first +# is with DMGL_PARAMS, the second is without it. +# --is-v3-ctor Calls is_gnu_v3_mangled_ctor on input; expected +# output is an integer representing ctor_kind. +# --is-v3-dtor Likewise, but for dtors. +# --ret-postfix Passes the DMGL_RET_POSTFIX option +# +# For compatibility, just in case it matters, the options line may be +# empty, to mean --format=auto. If it doesn't start with --, then it +# may contain only a format name. +# +# A line starting with `#' is ignored. +# However, blank lines in this file are NOT ignored. 
+# +--format=gnu --no-params +AddAlignment__9ivTSolverUiP12ivInteractorP7ivTGlue +ivTSolver::AddAlignment(unsigned int, ivInteractor *, ivTGlue *) +ivTSolver::AddAlignment +# +--format=gnu --no-params +ArrowheadIntersects__9ArrowLineP9ArrowheadR6BoxObjP7Graphic +ArrowLine::ArrowheadIntersects(Arrowhead *, BoxObj &, Graphic *) +ArrowLine::ArrowheadIntersects +# +--format=gnu --no-params +AtEnd__13ivRubberGroup +ivRubberGroup::AtEnd(void) +ivRubberGroup::AtEnd +# +--format=gnu --no-params +BgFilter__9ivTSolverP12ivInteractor +ivTSolver::BgFilter(ivInteractor *) +ivTSolver::BgFilter +# +--format=gnu --no-params +Check__6UArrayi +UArray::Check(int) +UArray::Check +# +--format=gnu --no-params +CoreConstDecls__8TextCodeR7ostream +TextCode::CoreConstDecls(ostream &) +TextCode::CoreConstDecls +# +--format=gnu --no-params +Detach__8StateVarP12StateVarView +StateVar::Detach(StateVarView *) +StateVar::Detach +# +--format=gnu --no-params +Done__9ComponentG8Iterator +Component::Done(Iterator) +Component::Done +# +--format=gnu --no-params +Effect__11RelateManipR7ivEvent +RelateManip::Effect(ivEvent &) +RelateManip::Effect +# +--format=gnu --no-params +FindFixed__FRP4CNetP4CNet +FindFixed(CNet *&, CNet *) +FindFixed +# +--format=gnu --no-params +Fix48_abort__FR8twolongs +Fix48_abort(twolongs &) +Fix48_abort +# +--format=gnu --no-params +GetBarInfo__15iv2_6_VScrollerP13ivPerspectiveRiT2 +iv2_6_VScroller::GetBarInfo(ivPerspective *, int &, int &) +iv2_6_VScroller::GetBarInfo +# +--format=gnu --no-params +GetBgColor__C9ivPainter +ivPainter::GetBgColor(void) const +ivPainter::GetBgColor +# +--format=gnu --no-params +InsertBody__15H_PullrightMenuii +H_PullrightMenu::InsertBody(int, int) +H_PullrightMenu::InsertBody +# +--format=gnu --no-params +InsertCharacter__9TextManipc +TextManip::InsertCharacter(char) +TextManip::InsertCharacter +# +--format=gnu --no-params +InsertToplevel__7ivWorldP12ivInteractorT1 +ivWorld::InsertToplevel(ivInteractor *, ivInteractor *) +ivWorld::InsertToplevel +# +--format=gnu --no-params +InsertToplevel__7ivWorldP12ivInteractorT1iiUi +ivWorld::InsertToplevel(ivInteractor *, ivInteractor *, int, int, unsigned int) +ivWorld::InsertToplevel +# +--format=gnu --no-params +IsAGroup__FP11GraphicViewP11GraphicComp +IsAGroup(GraphicView *, GraphicComp *) +IsAGroup +# +--format=gnu --no-params +IsA__10ButtonCodeUl +ButtonCode::IsA(unsigned long) +ButtonCode::IsA +# +--format=gnu --no-params +ReadName__FR7istreamPc +ReadName(istream &, char *) +ReadName +# +--format=gnu --no-params +Redraw__13StringBrowseriiii +StringBrowser::Redraw(int, int, int, int) +StringBrowser::Redraw +# +--format=gnu --no-params +Rotate__13ivTransformerf +ivTransformer::Rotate(float) +ivTransformer::Rotate +# +--format=gnu --no-params +Rotated__C13ivTransformerf +ivTransformer::Rotated(float) const +ivTransformer::Rotated +# +--format=gnu --no-params +Round__Ff +Round(float) +Round +# +--format=gnu --no-params +SetExport__16MemberSharedNameUi +MemberSharedName::SetExport(unsigned int) +MemberSharedName::SetExport +# +--format=gnu --no-params +Set__14ivControlState13ControlStatusUi +ivControlState::Set(ControlStatus, unsigned int) +ivControlState::Set +# +--format=gnu --no-params +Set__5DFacePcii +DFace::Set(char *, int, int) +DFace::Set +# +--format=gnu --no-params +VConvert__9ivTSolverP12ivInteractorRP8TElementT2 +ivTSolver::VConvert(ivInteractor *, TElement *&, TElement *&) +ivTSolver::VConvert +# +--format=gnu --no-params +VConvert__9ivTSolverP7ivTGlueRP8TElement +ivTSolver::VConvert(ivTGlue *, TElement *&) 
+ivTSolver::VConvert +# +--format=gnu --no-params +VOrder__9ivTSolverUiRP12ivInteractorT2 +ivTSolver::VOrder(unsigned int, ivInteractor *&, ivInteractor *&) +ivTSolver::VOrder +# +--format=gnu --no-params +_10PageButton$__both +PageButton::__both +PageButton::__both +# +--format=gnu --no-params +_3RNG$singleMantissa +RNG::singleMantissa +RNG::singleMantissa +# +--format=gnu --no-params +_5IComp$_release +IComp::_release +IComp::_release +# +--format=gnu --no-params +_$_10BitmapComp +BitmapComp::~BitmapComp(void) +BitmapComp::~BitmapComp +# +--format=gnu --no-params +_$_9__io_defs +__io_defs::~__io_defs(void) +__io_defs::~__io_defs +# +--format=gnu --no-params +_$_Q23foo3bar +foo::bar::~bar(void) +foo::bar::~bar +# +--format=gnu --no-params +_$_Q33foo3bar4bell +foo::bar::bell::~bell(void) +foo::bar::bell::~bell +# +--format=gnu --no-params +__10ivTelltaleiP7ivGlyph +ivTelltale::ivTelltale(int, ivGlyph *) +ivTelltale::ivTelltale +# +--format=gnu --no-params +__10ivViewportiP12ivInteractorUi +ivViewport::ivViewport(int, ivInteractor *, unsigned int) +ivViewport::ivViewport +# +--format=gnu --no-params +__10ostrstream +ostrstream::ostrstream(void) +ostrstream::ostrstream +# +--format=gnu --no-params +__10ostrstreamPcii +ostrstream::ostrstream(char *, int, int) +ostrstream::ostrstream +# +--format=gnu --no-params +__11BitmapTablei +BitmapTable::BitmapTable(int) +BitmapTable::BitmapTable +# +--format=gnu --no-params +__12ViewportCodeP12ViewportComp +ViewportCode::ViewportCode(ViewportComp *) +ViewportCode::ViewportCode +# +--format=gnu --no-params +__12iv2_6_Borderii +iv2_6_Border::iv2_6_Border(int, int) +iv2_6_Border::iv2_6_Border +# +--format=gnu --no-params +__12ivBreak_Listl +ivBreak_List::ivBreak_List(long) +ivBreak_List::ivBreak_List +# +--format=gnu --no-params +__14iv2_6_MenuItemiP12ivInteractor +iv2_6_MenuItem::iv2_6_MenuItem(int, ivInteractor *) +iv2_6_MenuItem::iv2_6_MenuItem +# +--format=gnu --no-params +__20DisplayList_IteratorR11DisplayList +DisplayList_Iterator::DisplayList_Iterator(DisplayList &) +DisplayList_Iterator::DisplayList_Iterator +# +--format=gnu --no-params +__3fooRT0 +foo::foo(foo &) +foo::foo +# +--format=gnu --no-params +__3fooiN31 +foo::foo(int, int, int, int) +foo::foo +# +--format=gnu --no-params +__3fooiRT0iT2iT2 +foo::foo(int, foo &, int, foo &, int, foo &) +foo::foo +# +--format=gnu --no-params +__6KeyMapPT0 +KeyMap::KeyMap(KeyMap *) +KeyMap::KeyMap +# +--format=gnu --no-params +__8ArrowCmdP6EditorUiUi +ArrowCmd::ArrowCmd(Editor *, unsigned int, unsigned int) +ArrowCmd::ArrowCmd +# +--format=gnu --no-params +__9F_EllipseiiiiP7Graphic +F_Ellipse::F_Ellipse(int, int, int, int, Graphic *) +F_Ellipse::F_Ellipse +# +--format=gnu --no-params +__9FrameDataP9FrameCompi +FrameData::FrameData(FrameComp *, int) +FrameData::FrameData +# +--format=gnu --no-params +__9HVGraphicP9CanvasVarP7Graphic +HVGraphic::HVGraphic(CanvasVar *, Graphic *) +HVGraphic::HVGraphic +# +--format=gnu --no-params +__Q23foo3bar +foo::bar::bar(void) +foo::bar::bar +# +--format=gnu --no-params +__Q33foo3bar4bell +foo::bar::bell::bell(void) +foo::bar::bell::bell +# +--format=gnu --no-params +__aa__3fooRT0 +foo::operator&&(foo &) +foo::operator&& +# +--format=gnu --no-params +__aad__3fooRT0 +foo::operator&=(foo &) +foo::operator&= +# +--format=gnu --no-params +__ad__3fooRT0 +foo::operator&(foo &) +foo::operator& +# +--format=gnu --no-params +__adv__3fooRT0 +foo::operator/=(foo &) +foo::operator/= +# +--format=gnu --no-params +__aer__3fooRT0 +foo::operator^=(foo &) +foo::operator^= +# +--format=gnu 
--no-params +__als__3fooRT0 +foo::operator<<=(foo &) +foo::operator<<= +# +--format=gnu --no-params +__amd__3fooRT0 +foo::operator%=(foo &) +foo::operator%= +# +--format=gnu --no-params +__ami__3fooRT0 +foo::operator-=(foo &) +foo::operator-= +# +--format=gnu --no-params +__aml__3FixRT0 +Fix::operator*=(Fix &) +Fix::operator*= +# +--format=gnu --no-params +__aml__5Fix16i +Fix16::operator*=(int) +Fix16::operator*= +# +--format=gnu --no-params +__aml__5Fix32RT0 +Fix32::operator*=(Fix32 &) +Fix32::operator*= +# +--format=gnu --no-params +__aor__3fooRT0 +foo::operator|=(foo &) +foo::operator|= +# +--format=gnu --no-params +__apl__3fooRT0 +foo::operator+=(foo &) +foo::operator+= +# +--format=gnu --no-params +__ars__3fooRT0 +foo::operator>>=(foo &) +foo::operator>>= +# +--format=gnu --no-params +__as__3fooRT0 +foo::operator=(foo &) +foo::operator= +# +--format=gnu --no-params +__cl__3fooRT0 +foo::operator()(foo &) +foo::operator() +# +--format=gnu --no-params +__cl__6Normal +Normal::operator()(void) +Normal::operator() +# +--format=gnu --no-params +__cl__6Stringii +String::operator()(int, int) +String::operator() +# +--format=gnu --no-params +__cm__3fooRT0 +foo::operator, (foo &) +foo::operator, +# +--format=gnu --no-params +__co__3foo +foo::operator~(void) +foo::operator~ +# +--format=gnu --no-params +__dl__3fooPv +foo::operator delete(void *) +foo::operator delete +# +--format=gnu --no-params +__dv__3fooRT0 +foo::operator/(foo &) +foo::operator/ +# +--format=gnu --no-params +__eq__3fooRT0 +foo::operator==(foo &) +foo::operator== +# +--format=gnu --no-params +__er__3fooRT0 +foo::operator^(foo &) +foo::operator^ +# +--format=gnu --no-params +__ge__3fooRT0 +foo::operator>=(foo &) +foo::operator>= +# +--format=gnu --no-params +__gt__3fooRT0 +foo::operator>(foo &) +foo::operator> +# +--format=gnu --no-params +__le__3fooRT0 +foo::operator<=(foo &) +foo::operator<= +# +--format=gnu --no-params +__ls__3fooRT0 +foo::operator<<(foo &) +foo::operator<< +# +--format=gnu --no-params +__ls__FR7ostreamPFR3ios_R3ios +operator<<(ostream &, ios &(*)(ios &)) +operator<< +# +--format=gnu --no-params +__ls__FR7ostreamR3Fix +operator<<(ostream &, Fix &) +operator<< +# +--format=gnu --no-params +__lt__3fooRT0 +foo::operator<(foo &) +foo::operator< +# +--format=gnu --no-params +__md__3fooRT0 +foo::operator%(foo &) +foo::operator% +# +--format=gnu --no-params +__mi__3fooRT0 +foo::operator-(foo &) +foo::operator- +# +--format=gnu --no-params +__ml__3fooRT0 +foo::operator*(foo &) +foo::operator* +# +--format=gnu --no-params +__mm__3fooi +foo::operator--(int) +foo::operator-- +# +--format=gnu --no-params +__ne__3fooRT0 +foo::operator!=(foo &) +foo::operator!= +# +--format=gnu --no-params +__nt__3foo +foo::operator!(void) +foo::operator! 
+# +--format=gnu --no-params +__nw__3fooi +foo::operator new(int) +foo::operator new +# +--format=gnu --no-params +__oo__3fooRT0 +foo::operator||(foo &) +foo::operator|| +# +--format=gnu --no-params +__opPc__3foo +foo::operator char *(void) +foo::operator char * +# +--format=gnu --no-params +__opi__3foo +foo::operator int(void) +foo::operator int +# +--format=gnu --no-params +__or__3fooRT0 +foo::operator|(foo &) +foo::operator| +# +--format=gnu --no-params +__pl__3fooRT0 +foo::operator+(foo &) +foo::operator+ +# +--format=gnu --no-params +__pp__3fooi +foo::operator++(int) +foo::operator++ +# +--format=gnu --no-params +__rf__3foo +foo::operator->(void) +foo::operator-> +# +--format=gnu --no-params +__rm__3fooRT0 +foo::operator->*(foo &) +foo::operator->* +# +--format=gnu --no-params +__rs__3fooRT0 +foo::operator>>(foo &) +foo::operator>> +# +--format=gnu --no-params +_new_Fix__FUs +_new_Fix(unsigned short) +_new_Fix +# +--format=gnu --no-params +_vt.foo +foo virtual table +foo virtual table +# +--format=gnu --no-params +_vt.foo.bar +foo::bar virtual table +foo::bar virtual table +# +--format=gnu --no-params +_vt$foo +foo virtual table +foo virtual table +# +--format=gnu --no-params +_vt$foo$bar +foo::bar virtual table +foo::bar virtual table +# +--format=gnu --no-params +append__7ivGlyphPT0 +ivGlyph::append(ivGlyph *) +ivGlyph::append +# +--format=gnu --no-params +clearok__FP7_win_sti +clearok(_win_st *, int) +clearok +# +--format=gnu --no-params +complexfunc2__FPFPc_i +complexfunc2(int (*)(char *)) +complexfunc2 +# +--format=gnu --no-params +complexfunc3__FPFPFPl_s_i +complexfunc3(int (*)(short (*)(long *))) +complexfunc3 +# +--format=gnu --no-params +complexfunc4__FPFPFPc_s_i +complexfunc4(int (*)(short (*)(char *))) +complexfunc4 +# +--format=gnu --no-params +complexfunc5__FPFPc_PFl_i +complexfunc5(int (*(*)(char *))(long)) +complexfunc5 +# +--format=gnu --no-params +complexfunc6__FPFPi_PFl_i +complexfunc6(int (*(*)(int *))(long)) +complexfunc6 +# +--format=gnu --no-params +complexfunc7__FPFPFPc_i_PFl_i +complexfunc7(int (*(*)(int (*)(char *)))(long)) +complexfunc7 +# +--format=gnu --no-params +foo__FiN30 +foo(int, int, int, int) +foo +# +--format=gnu --no-params +foo__FiR3fooiT1iT1 +foo(int, foo &, int, foo &, int, foo &) +foo +# +--format=gnu --no-params +foo___3barl +bar::foo_(long) +bar::foo_ +# +--format=gnu --no-params +insert__15ivClippingStacklRP8_XRegion +ivClippingStack::insert(long, _XRegion *&) +ivClippingStack::insert +# +--format=gnu --no-params +insert__16ChooserInfo_ListlR11ChooserInfo +ChooserInfo_List::insert(long, ChooserInfo &) +ChooserInfo_List::insert +# +--format=gnu --no-params +insert__17FontFamilyRepListlRP15ivFontFamilyRep +FontFamilyRepList::insert(long, ivFontFamilyRep *&) +FontFamilyRepList::insert +# +--format=gnu --no-params +leaveok__FP7_win_stc +leaveok(_win_st *, char) +leaveok +# +--format=gnu --no-params +left_mover__C7ivMFKitP12ivAdjustableP7ivStyle +ivMFKit::left_mover(ivAdjustable *, ivStyle *) const +ivMFKit::left_mover +# +--format=gnu --no-params +overload1arg__FSc +overload1arg(signed char) +overload1arg +# +--format=gnu --no-params +overload1arg__FUc +overload1arg(unsigned char) +overload1arg +# +--format=gnu --no-params +overload1arg__FUi +overload1arg(unsigned int) +overload1arg +# +--format=gnu --no-params +overload1arg__FUl +overload1arg(unsigned long) +overload1arg +# +--format=gnu --no-params +overload1arg__FUs +overload1arg(unsigned short) +overload1arg +# +--format=gnu --no-params +overload1arg__Fc +overload1arg(char) +overload1arg +# 
+--format=gnu --no-params +overload1arg__Fd +overload1arg(double) +overload1arg +# +--format=gnu --no-params +overload1arg__Ff +overload1arg(float) +overload1arg +# +--format=gnu --no-params +overload1arg__Fi +overload1arg(int) +overload1arg +# +--format=gnu --no-params +overload1arg__Fl +overload1arg(long) +overload1arg +# +--format=gnu --no-params +overload1arg__Fs +overload1arg(short) +overload1arg +# +--format=gnu --no-params +overload1arg__Fv +overload1arg(void) +overload1arg +# +--format=gnu --no-params +overloadargs__Fi +overloadargs(int) +overloadargs +# +--format=gnu --no-params +overloadargs__Fii +overloadargs(int, int) +overloadargs +# +--format=gnu --no-params +overloadargs__Fiii +overloadargs(int, int, int) +overloadargs +# +--format=gnu --no-params +overloadargs__Fiiii +overloadargs(int, int, int, int) +overloadargs +# +--format=gnu --no-params +overloadargs__Fiiiii +overloadargs(int, int, int, int, int) +overloadargs +# +--format=gnu --no-params +overloadargs__Fiiiiii +overloadargs(int, int, int, int, int, int) +overloadargs +# +--format=gnu --no-params +overloadargs__Fiiiiiii +overloadargs(int, int, int, int, int, int, int) +overloadargs +# +--format=gnu --no-params +overloadargs__Fiiiiiiii +overloadargs(int, int, int, int, int, int, int, int) +overloadargs +# +--format=gnu --no-params +overloadargs__Fiiiiiiiii +overloadargs(int, int, int, int, int, int, int, int, int) +overloadargs +# +--format=gnu --no-params +overloadargs__Fiiiiiiiiii +overloadargs(int, int, int, int, int, int, int, int, int, int) +overloadargs +# +--format=gnu --no-params +overloadargs__Fiiiiiiiiiii +overloadargs(int, int, int, int, int, int, int, int, int, int, int) +overloadargs +# +--format=gnu --no-params +poke__8ivRasterUlUlffff +ivRaster::poke(unsigned long, unsigned long, float, float, float, float) +ivRaster::poke +# +--format=gnu --no-params +polar__Fdd +polar(double, double) +polar +# +--format=gnu --no-params +scale__13ivTransformerff +ivTransformer::scale(float, float) +ivTransformer::scale +# +--format=gnu --no-params +sgetn__7filebufPci +filebuf::sgetn(char *, int) +filebuf::sgetn +# +--format=gnu --no-params +shift__FP5_FrepiT0 +shift(_Frep *, int, _Frep *) +shift +# +--format=gnu --no-params +test__C6BitSeti +BitSet::test(int) const +BitSet::test +# +--format=gnu --no-params +test__C6BitSetii +BitSet::test(int, int) const +BitSet::test +# +--format=gnu --no-params +text_source__8Documentl +Document::text_source(long) +Document::text_source +# +--format=gnu --no-params +variance__6Erlangd +Erlang::variance(double) +Erlang::variance +# +--format=gnu --no-params +view__14DocumentViewerP8ItemViewP11TabularItem +DocumentViewer::view(ItemView *, TabularItem *) +DocumentViewer::view +# +--format=gnu --no-params +xy_extents__11ivExtensionffff +ivExtension::xy_extents(float, float, float, float) +ivExtension::xy_extents +# +--format=gnu --no-params +zero__8osMemoryPvUi +osMemory::zero(void *, unsigned int) +osMemory::zero +# +--format=gnu --no-params +_2T4$N +T4::N +T4::N +# +--format=gnu --no-params +_Q22T42t1$N +T4::t1::N +T4::t1::N +# +--format=gnu --no-params +get__2T1 +T1::get(void) +T1::get +# +--format=gnu --no-params +get__Q22T11a +T1::a::get(void) +T1::a::get +# +--format=gnu --no-params +get__Q32T11a1b +T1::a::b::get(void) +T1::a::b::get +# +--format=gnu --no-params +get__Q42T11a1b1c +T1::a::b::c::get(void) +T1::a::b::c::get +# +--format=gnu --no-params +get__Q52T11a1b1c1d +T1::a::b::c::d::get(void) +T1::a::b::c::d::get +# +--format=gnu --no-params +put__2T1i +T1::put(int) +T1::put +# 
+--format=gnu --no-params +put__Q22T11ai +T1::a::put(int) +T1::a::put +# +--format=gnu --no-params +put__Q32T11a1bi +T1::a::b::put(int) +T1::a::b::put +# +--format=gnu --no-params +put__Q42T11a1b1ci +T1::a::b::c::put(int) +T1::a::b::c::put +# +--format=gnu --no-params +put__Q52T11a1b1c1di +T1::a::b::c::d::put(int) +T1::a::b::c::d::put +# +--format=gnu --no-params +bar__3fooPv +foo::bar(void *) +foo::bar +# +--format=gnu --no-params +bar__C3fooPv +foo::bar(void *) const +foo::bar +# +--format=gnu --no-params +__eq__3fooRT0 +foo::operator==(foo &) +foo::operator== +# +--format=gnu --no-params +__eq__C3fooR3foo +foo::operator==(foo &) const +foo::operator== +# +--format=gnu --no-params +elem__t6vector1Zdi +vector::elem(int) +vector::elem +# +--format=gnu --no-params +elem__t6vector1Zii +vector::elem(int) +vector::elem +# +--format=gnu --no-params +__t6vector1Zdi +vector::vector(int) +vector::vector +# +--format=gnu --no-params +__t6vector1Zii +vector::vector(int) +vector::vector +# +--format=gnu --no-params +_$_t6vector1Zdi +vector::~vector(int) +vector::~vector +# +--format=gnu --no-params +_$_t6vector1Zii +vector::~vector(int) +vector::~vector +# +--format=gnu --no-params +__nw__t2T11ZcUi +T1::operator new(unsigned int) +T1::operator new +# +--format=gnu --no-params +__nw__t2T11Z1tUi +T1::operator new(unsigned int) +T1::operator new +# +--format=gnu --no-params +__dl__t2T11ZcPv +T1::operator delete(void *) +T1::operator delete +# +--format=gnu --no-params +__dl__t2T11Z1tPv +T1::operator delete(void *) +T1::operator delete +# +--format=gnu --no-params +__t2T11Zci +T1::T1(int) +T1::T1 +# +--format=gnu --no-params +__t2T11Zc +T1::T1(void) +T1::T1 +# +--format=gnu --no-params +__t2T11Z1ti +T1::T1(int) +T1::T1 +# +--format=gnu --no-params +__t2T11Z1t +T1::T1(void) +T1::T1 +# +--format=gnu --no-params +__Q2t4List1Z10VHDLEntity3Pix +List::Pix::Pix(void) +List::Pix::Pix +# +--format=gnu --no-params +__Q2t4List1Z10VHDLEntity3PixPQ2t4List1Z10VHDLEntity7element +List::Pix::Pix(List::element *) +List::Pix::Pix +# +--format=gnu --no-params +__Q2t4List1Z10VHDLEntity3PixRCQ2t4List1Z10VHDLEntity3Pix +List::Pix::Pix(List::Pix const &) +List::Pix::Pix +# +--format=gnu --no-params +__Q2t4List1Z10VHDLEntity7elementRC10VHDLEntityPT0 +List::element::element(VHDLEntity const &, List::element *) +List::element::element +# +--format=gnu --no-params +__Q2t4List1Z10VHDLEntity7elementRCQ2t4List1Z10VHDLEntity7element +List::element::element(List::element const &) +List::element::element +# +--format=gnu --no-params +__cl__C11VHDLLibraryGt4PixX3Z11VHDLLibraryZ14VHDLLibraryRepZt4List1Z10VHDLEntity +VHDLLibrary::operator()(PixX >) const +VHDLLibrary::operator() +# +--format=gnu --no-params +__cl__Ct4List1Z10VHDLEntityRCQ2t4List1Z10VHDLEntity3Pix +List::operator()(List::Pix const &) const +List::operator() +# +--format=gnu --no-params +__ne__FPvRCQ2t4List1Z10VHDLEntity3Pix +operator!=(void *, List::Pix const &) +operator!= +# +--format=gnu --no-params +__ne__FPvRCt4PixX3Z11VHDLLibraryZ14VHDLLibraryRepZt4List1Z10VHDLEntity +operator!=(void *, PixX > const &) +operator!= +# +--format=gnu --no-params +__t4List1Z10VHDLEntityRCt4List1Z10VHDLEntity +List::List(List const &) +List::List +# +--format=gnu --no-params +__t4PixX3Z11VHDLLibraryZ14VHDLLibraryRepZt4List1Z10VHDLEntity +PixX >::PixX(void) +PixX >::PixX +# +--format=gnu --no-params +__t4PixX3Z11VHDLLibraryZ14VHDLLibraryRepZt4List1Z10VHDLEntityP14VHDLLibraryRepGQ2t4List1Z10VHDLEntity3Pix +PixX >::PixX(VHDLLibraryRep *, List::Pix) +PixX >::PixX +# +--format=gnu --no-params 
+__t4PixX3Z11VHDLLibraryZ14VHDLLibraryRepZt4List1Z10VHDLEntityRCt4PixX3Z11VHDLLibraryZ14VHDLLibraryRepZt4List1Z10VHDLEntity +PixX >::PixX(PixX > const &) +PixX >::PixX +# +--format=gnu --no-params +nextE__C11VHDLLibraryRt4PixX3Z11VHDLLibraryZ14VHDLLibraryRepZt4List1Z10VHDLEntity +VHDLLibrary::nextE(PixX > &) const +VHDLLibrary::nextE +# +--format=gnu --no-params +next__Ct4List1Z10VHDLEntityRQ2t4List1Z10VHDLEntity3Pix +List::next(List::Pix &) const +List::next +# +--format=gnu --no-params +_GLOBAL_$D$set +global destructors keyed to set +global destructors keyed to set +# +--format=gnu --no-params +_GLOBAL_$I$set +global constructors keyed to set +global constructors keyed to set +# +--format=gnu --no-params +__as__t5ListS1ZUiRCt5ListS1ZUi +ListS::operator=(ListS const &) +ListS::operator= +# +--format=gnu --no-params +__cl__Ct5ListS1ZUiRCQ2t5ListS1ZUi3Vix +ListS::operator()(ListS::Vix const &) const +ListS::operator() +# +--format=gnu --no-params +__cl__Ct5SetLS1ZUiRCQ2t5SetLS1ZUi3Vix +SetLS::operator()(SetLS::Vix const &) const +SetLS::operator() +# +--format=gnu --no-params +__t10ListS_link1ZUiRCUiPT0 +ListS_link::ListS_link(unsigned int const &, ListS_link *) +ListS_link::ListS_link +# +--format=gnu --no-params +__t10ListS_link1ZUiRCt10ListS_link1ZUi +ListS_link::ListS_link(ListS_link const &) +ListS_link::ListS_link +# +--format=gnu --no-params +__t5ListS1ZUiRCt5ListS1ZUi +ListS::ListS(ListS const &) +ListS::ListS +# +--format=gnu --no-params +next__Ct5ListS1ZUiRQ2t5ListS1ZUi3Vix +ListS::next(ListS::Vix &) const +ListS::next +# +--format=gnu --no-params +__ne__FPvRCQ2t5SetLS1ZUi3Vix +operator!=(void *, SetLS::Vix const &) +operator!= +# +--format=gnu --no-params +__t8ListElem1Z5LabelRt4List1Z5Label +ListElem