Splunk Search

How to extract heavily nested JSON data?

LunarLlama
New Member

Hey everyone,
I am very new to Splunk, and many of the examples I see use relatively simple data. I am trying to extract certain fields for use in a mapping scheme.
Here is the JSON:

 "cve" : {
      "data_type" : "CVE",
      "data_format" : "MITRE",
      "data_version" : "4.0",
      "CVE_data_meta" : {
        "ID" : "CVE-1999-0986",
        "ASSIGNER" : "cve@mitre.org"
      },
      "affects" : {
        "vendor" : {
          "vendor_data" : [ {
            "vendor_name" : "debian",
            "product" : {
              "product_data" : [ {
                "product_name" : "debian_linux",
                "version" : {
                  "version_data" : [ {
                    "version_value" : "2.1"
                  } ]
                }
              } ]
            }
          }, {
            "vendor_name" : "linux",
            "product" : {
              "product_data" : [ {
                "product_name" : "linux_kernel",
                "version" : {
                  "version_data" : [ {
                    "version_value" : "2.0"
                  }, {
                    "version_value" : "2.0.34"
                  }, {
                    "version_value" : "2.0.35"
                  }, {
                    "version_value" : "2.0.36"
                  }, {
                    "version_value" : "2.0.37"
                  }, {
                    "version_value" : "2.0.38"
                  } ]
                }
              } ]
            }
          }, {
            "vendor_name" : "redhat",
            "product" : {
              "product_data" : [ {
                "product_name" : "linux",
                "version" : {
                  "version_data" : [ {
                    "version_value" : "5.2"
                  } ]
                }
              } ]
            }
          } ]
        }
      },
      "problemtype" : {
        "problemtype_data" : [ {
          "description" : [ {
            "lang" : "en",
            "value" : "NVD-CWE-Other"
          } ]
        } ]
      },
      "references" : {
        "reference_data" : [ {
          "url" : "http://www.securityfocus.com/bid/870",
          "name" : "870",
          "refsource" : "BID"
        } ]
      },
      "description" : {
        "description_data" : [ {
          "lang" : "en",
          "value" : "The ping command in Linux 2.0.3x allows local users to cause a denial of service by sending large packets with the -R (record route) option."
        } ]
      }
    },
    "configurations" : {
      "CVE_data_version" : "4.0",
      "nodes" : [ {
        "operator" : "OR",
        "cpe" : [ {
          "vulnerable" : true,
          "cpe22Uri" : "cpe:/o:debian:debian_linux:2.1",
          "cpe23Uri" : "cpe:2.3:o:debian:debian_linux:2.1:*:*:*:*:*:*:*"
        }, {
          "vulnerable" : true,
          "cpe22Uri" : "cpe:/o:linux:linux_kernel:2.0",
          "cpe23Uri" : "cpe:2.3:o:linux:linux_kernel:2.0:*:*:*:*:*:*:*"
        }, {
          "vulnerable" : true,
          "cpe22Uri" : "cpe:/o:linux:linux_kernel:2.0.34",
          "cpe23Uri" : "cpe:2.3:o:linux:linux_kernel:2.0.34:*:*:*:*:*:*:*"
        }, {
          "vulnerable" : true,
          "cpe22Uri" : "cpe:/o:linux:linux_kernel:2.0.35",
          "cpe23Uri" : "cpe:2.3:o:linux:linux_kernel:2.0.35:*:*:*:*:*:*:*"
        }, {
          "vulnerable" : true,
          "cpe22Uri" : "cpe:/o:linux:linux_kernel:2.0.36",
          "cpe23Uri" : "cpe:2.3:o:linux:linux_kernel:2.0.36:*:*:*:*:*:*:*"
        }, {
          "vulnerable" : true,
          "cpe22Uri" : "cpe:/o:linux:linux_kernel:2.0.37",
          "cpe23Uri" : "cpe:2.3:o:linux:linux_kernel:2.0.37:*:*:*:*:*:*:*"
        }, {
          "vulnerable" : true,
          "cpe22Uri" : "cpe:/o:linux:linux_kernel:2.0.38",
          "cpe23Uri" : "cpe:2.3:o:linux:linux_kernel:2.0.38:*:*:*:*:*:*:*"
        }, {
          "vulnerable" : true,
          "cpe22Uri" : "cpe:/o:redhat:linux:5.2::i386",
          "cpe23Uri" : "cpe:2.3:o:redhat:linux:5.2:*:i386:*:*:*:*:*"
        } ]
      } ]
    },
    "impact" : {
      "baseMetricV2" : {
        "cvssV2" : {
          "version" : "2.0",
          "vectorString" : "(AV:N/AC:L/Au:N/C:N/I:N/A:P)",
          "accessVector" : "NETWORK",
          "accessComplexity" : "LOW",
          "authentication" : "NONE",
          "confidentialityImpact" : "NONE",
          "integrityImpact" : "NONE",
          "availabilityImpact" : "PARTIAL",
          "baseScore" : 5.0
        },
        "severity" : "MEDIUM",
        "exploitabilityScore" : 10.0,
        "impactScore" : 2.9,
        "obtainAllPrivilege" : false,
        "obtainUserPrivilege" : false,
        "obtainOtherPrivilege" : false,
        "userInteractionRequired" : false
      }
    },
    "publishedDate" : "1999-12-08T05:00Z",
    "lastModifiedDate" : "2008-09-09T12:36Z"
  }

Essentially, the data I am trying to group together would ideally look like this:

vendor: debian        product_name: debian_linux       version_value: 2.1

I'd like this row to repeat whenever a product has additional versions. I've tried using mvexpand, but it ends up with duplicates and also pulls in version_value from other products.
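For reference, here's a simplified sketch of the kind of search I've been trying (field names taken from the sample above):

| spath  ```auto-extract everything; each nested array becomes a flat multivalue field```
| rename "cve.affects.vendor.vendor_data{}.vendor_name" AS vendor,
         "cve.affects.vendor.vendor_data{}.product.product_data{}.product_name" AS product_name,
         "cve.affects.vendor.vendor_data{}.product.product_data{}.version.version_data{}.version_value" AS version_value
| mvexpand version_value  ```one row per version, but vendor and product_name are still full multivalue lists```
| table vendor product_name version_value

Because vendor, product_name, and version_value are all flattened into separate multivalue fields before the expand, every version ends up paired with every vendor and product.

Any insight would be much appreciated 🙂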


Justinboucher0
Path Finder

Have you tried the | spath command to auto-extract some of these values? It should do the majority of the work for you, but you may need mvexpand as well. Check out spath first, though, to see how far it gets you.
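Untested against your data, but with nested arrays like this the trick is to expand one array level at a time and re-run spath with input= on each expanded fragment, so a row only ever sees its own vendor's products and versions. Something along these lines (assuming the whole JSON object is the raw event):

| spath path=cve.affects.vendor.vendor_data{} output=vendor_data
| mvexpand vendor_data  ```one row per vendor object```
| spath input=vendor_data path=vendor_name output=vendor
| spath input=vendor_data path=product.product_data{} output=product_data
| mvexpand product_data  ```one row per product object```
| spath input=product_data path=product_name output=product_name
| spath input=product_data path=version.version_data{}.version_value output=version_value
| mvexpand version_value  ```one row per version```
| table vendor product_name version_value

That should give you one row per vendor/product/version combination (e.g. debian | debian_linux | 2.1) without crossing versions into other products, which is the duplicate problem you described.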
